Merge from mainline (154736:156693)
[official-gcc/graphite-test-results.git] / gcc / builtins.c
blobbfff2d54135191ee17b0d0cdd810e74a5687173f
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* Identifiers carrying either reserved prefix name a compiler
     built-in regardless of optimization level.  */
  return (strncmp (name, "__builtin_", strlen ("__builtin_")) == 0
	  || strncmp (name, "__sync_", strlen ("__sync_")) == 0);
}
241 /* Return true if DECL is a function symbol representing a built-in. */
243 bool
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
254 static bool
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the alignment guaranteed by the innermost reference:
     it only ever decreases from MAX_ALIGN as misaligning components
     are discovered.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Strip component references (array/field accesses) down to the
	 base object, collecting the constant bit position and any
	 variable byte offset.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* A nonzero constant bit position can only guarantee alignment up
	 to its lowest set bit.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a PLUS_EXPR chain of variable offsets, reducing INNER for
	 each term whose alignment contribution we can bound.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* An offset of unknown form could be byte-aligned only.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* A CONST_DECL stands for its initializer's constant value.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
346 bool
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* Without trustworthy alignment data, claim nothing.  */
  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic looking for a tighter bound.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of it,
	     since the addend may misalign the pointer by that much.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree
     (and evaluating the condition is permissible per ONLY_VALUE).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the string is e2, provided e1's side effects may be
     dropped.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the largest valid starting index into the string data.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
521 static const char *
522 c_getstr (tree src)
524 tree offset_node;
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C[0]/C[1] accumulate the low and high host words of the value.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  /* Place each source byte at the bit position the target would read
     it from, honoring both byte and word endianness.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      /* CH stays 0 once a NUL byte has been seen, so bytes past the
	 terminator read as zero.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Return nonzero on failure (non-constant CST, or a value that does
   not survive the round trip through the host char).  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Truncate the constant to the target's char width.  */
  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two disagree the
     value cannot be represented in a host char.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
604 static tree
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
612 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  -1 means it has not been
   allocated yet; it is created lazily on first use.  */
static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target-specific receiver pattern if one exists; fall
     back to the nonlocal-goto receiver, else emit nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* The buffer layout matches expand_builtin_setjmp_setup: frame
	 pointer, receiver label, then the stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
913 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
914 and the address of the save area. */
/* Returns const0_rtx after emitting the jump, or NULL_RTX if the argument
   list does not match (POINTER_TYPE, POINTER_TYPE) so the caller can fall
   back to a normal call.
   NOTE(review): brace-only lines appear to have been dropped from this
   extraction; code lines are preserved byte-for-byte.  */
916 static rtx
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
/* Verify the call takes exactly two pointer arguments.  */
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
923 return NULL_RTX;
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
/* Expand both arguments and normalize them to Pmode addresses.  */
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
/* Save-area layout: word 0 holds the frame pointer; the slot at offset
   GET_MODE_SIZE (Pmode) holds the saved stack pointer.  */
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
/* Prefer the target's dedicated nonlocal_goto pattern when available;
   otherwise emit the generic restore-FP/restore-SP/indirect-jump sequence.  */
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
945 else
946 #endif
948 r_label = copy_to_reg (r_label);
/* Tell the optimizers that all memory and the frame may be modified
   across this point.  */
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
983 non-local goto. */
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
986 if (JUMP_P (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
989 break;
/* Stop at the first CALL: the jump must belong to this expansion.  */
991 else if (CALL_P (insn))
992 break;
995 return const0_rtx;
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1001 stack pointer. */
/* BUF_ADDR is an rtx holding the address of the setjmp buffer; only the
   third word (offset 2 * GET_MODE_SIZE (Pmode)) is rewritten here.  */
1003 static void
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1007 rtx stack_save;
/* Pick the mode the target uses for a nonlocal stack-save slot; the
   STACK_SAVEAREA_MODE definition, when present, takes precedence.  */
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013 #endif
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 #endif
/* MEM for the stack-pointer slot of the buffer.  */
1018 stack_save
1019 = gen_rtx_MEM (sa_mode,
1020 memory_address
1021 (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1024 #ifdef HAVE_setjmp
1025 if (HAVE_setjmp)
1026 emit_insn (gen_setjmp ());
1027 #endif
/* Store the current stack pointer into the slot.  */
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch.  For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1034 effects. */
/* EXP is the CALL_EXPR.  Emits either a target prefetch insn or, lacking
   one, just the side effects of the address expression.  Invalid constant
   arguments produce a diagnostic and are replaced with zero.  */
1036 static void
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1040 int nargs;
1041 rtx op0, op1, op2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1044 return;
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050 locality). */
1051 nargs = call_expr_nargs (exp);
1052 if (nargs > 1)
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1054 else
1055 arg1 = integer_zero_node;
1056 if (nargs > 2)
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1058 else
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1075 " using zero");
1076 op1 = const0_rtx;
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090 op2 = const0_rtx;
1093 #ifdef HAVE_prefetch
1094 if (HAVE_prefetch)
/* If the address does not satisfy the prefetch insn's operand predicate
   (or has the wrong mode), legitimize it into a Pmode register first.  */
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097 (op0,
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1106 #endif
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1111 emit_insn (op0);
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1117 NULL if unknown. */
/* Returns a BLKmode MEM whose attributes are derived from EXP but whose
   alias set and size are cleared, since string/memory builtins may touch
   anything within LEN bytes.  */
1119 static rtx
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1123 rtx addr, mem;
1124 HOST_WIDE_INT off;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
/* Expand the ORIGINAL expression for the address so any SAVE_EXPR
   bookkeeping still happens; EXP is only used for attribute derivation.  */
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
/* Peel a constant positive offset off `&obj + off' so the attributes of
   OBJ can be used and the MEM re-adjusted by OFF below.  */
1141 off = 0;
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1151 else
1152 exp = NULL;
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1157 if (exp)
1159 set_mem_attributes (mem, exp, 0);
1161 if (off)
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1173 tree inner = exp;
/* Strip wrappers to reach the innermost COMPONENT_REF.  */
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
/* Walk outward over nested COMPONENT_REFs, dropping those the access
   is not provably contained in.  */
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1208 if (length >= 0
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1212 HOST_WIDE_INT size
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1214 if (offset <= size
1215 && length <= size
1216 && offset + length <= size)
1217 break;
/* Account for this field's start when stepping to the enclosing ref;
   give up tracking if the field offset is not a host integer.  */
1220 if (offset >= 0
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1224 / BITS_PER_UNIT;
1225 else
1227 offset = -1;
1228 length = -1;
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1236 offset = -1;
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Stringops may alias anything and may span multiple elements, so
   clear the alias set and size attributes unconditionally.  */
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1247 return mem;
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size () on first use.  */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size () on first use.  */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
/* The result is memoized in SIZE; the block layout is: incoming
   arg-pointer, optional structure-value address, then each argument
   register aligned to its mode's alignment.  */
1269 static int
1270 apply_args_size (void)
1272 static int size = -1;
1273 int align;
1274 unsigned int regno;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1278 if (size < 0)
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
/* Reserve an aligned slot for every register that can carry an
   argument, recording its mode for later save/restore.  */
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1301 else
1303 apply_args_mode[regno] = VOIDmode;
1306 return size;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
/* Memoized like apply_args_size; lays out one aligned slot per
   function-value register.  */
1312 static int
1313 apply_result_size (void)
1315 static int size = -1;
1316 int align, regno;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1322 size = 0;
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1337 else
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1344 #endif
1346 return size;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
/* Returns a PARALLEL of SETs, one per value register, copying between
   the register (outbound regno when saving, INCOMING_REGNO otherwise)
   and its aligned slot inside RESULT.  */
1354 static rtx
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1359 rtx reg, mem;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1362 size = nelts = 0;
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
/* Allocates a stack block laid out per apply_args_size () and stores the
   incoming arg-pointer, optional struct-value address, and each incoming
   argument register into it; returns the block's address in a register.  */
1383 static rtx
1384 expand_builtin_apply_args_1 (void)
1386 rtx registers, tem;
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
/* NOTE(review): the lvalue line (presumably `tem') before this
   assignment appears to be missing from this extraction — confirm
   against the upstream source.  */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1422 NULL_RTX);
1423 #endif
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1446 saved. */
/* The saving sequence is emitted once per function and its result cached
   in apply_args_value for subsequent calls.  */
1448 static rtx
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1459 rtx temp;
1460 rtx seq;
/* Build the save sequence in isolation so it can be re-emitted at the
   start of the function.  */
1462 start_sequence ();
1463 temp = expand_builtin_apply_args_1 ();
1464 seq = get_insns ();
1465 end_sequence ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1474 that pseudo. */
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn)
1479 else
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1482 return temp;
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the callee address, ARGUMENTS the block produced by
   __builtin_apply_args, ARGSIZE the number of bytes of stack arguments
   to copy.  Returns the address (in ptr_mode) of a block holding the
   callee's return registers, laid out per apply_result_size ().  */
1489 static rtx
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN)
1510 #endif
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1514 manipulations. */
1515 do_pending_stack_adjust ();
1516 NO_DEFER_POP;
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1522 else
1523 #endif
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
/* Compute the destination of the argument copy; on upward-growing
   stacks back up by ARGSIZE first.  */
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1541 else
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1543 #endif
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
/* Called for its side effect of initializing apply_args_mode.  */
1551 apply_args_size ();
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1557 if (struct_value)
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1577 if (struct_value)
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1601 else
1602 #endif
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1606 rtx valreg = 0;
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1626 else
1627 #endif
1628 gcc_unreachable ();
1630 /* Find the CALL insn we just emitted, and attach the register usage
1631 information. */
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1639 else
1640 #endif
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1643 OK_DEFER_POP;
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
/* RESULT is the address of a result block produced by __builtin_apply;
   reload the value registers from it and return from the current
   function without running the normal epilogue value handling.  */
1652 static void
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1657 rtx reg;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode.  */
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
/* Prefer the target's untyped_return pattern when it exists.  */
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1669 emit_barrier ();
1670 return;
1672 #endif
1674 /* Restore the return value and note that each value is used. */
1675 size = 0;
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USEs in a separate sequence so they can be emitted
   together, immediately before the return.  */
1685 push_to_sequence (call_fusage);
1686 emit_use (reg);
1687 call_fusage = get_insns ();
1688 end_sequence ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1696 of the function. */
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
1738 /* This helper macro, meant to be used in mathfn_built_in below,
1739 determines which among a set of three builtin math functions is
1740 appropriate for a given type mode. The `F' and `L' cases are
1741 automatically generated from the `double' case. */
/* Note: each expansion emits three `case' labels plus assignments to
   FCODE/FCODEF/FCODEL, so these macros are only meaningful inside the
   switch in mathfn_built_in_1 where those variables are in scope.  */
1742 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1743 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1744 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1745 fcodel = BUILT_IN_MATHFN##L ; break;
1746 /* Similar to above, but appends _R after any F/L suffix. */
1747 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1748 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1749 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1750 fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
/* The switch below, via CASE_MATHFN/CASE_MATHFN_REENT, sets FCODE,
   FCODEF and FCODEL to the double/float/long-double variants of FN;
   the proper one is then selected from TYPE's main variant.  */
1757 static tree
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1764 switch (fn)
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
/* FN is not a recognized math builtin: no conversion possible.  */
1850 default:
1851 return NULL_TREE;
/* Dispatch on the main variant so qualified versions of the three
   standard float types are handled too.  */
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1860 else
1861 return NULL_TREE;
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1872 /* If errno must be maintained, expand the RTL to check if the result,
1873 TARGET, of a built-in function call, EXP, is NaN, and if so set
1874 errno to EDOM. */
/* Relies on TARGET != TARGET (EQ compare failing) only for NaN; the
   label LAB is reached when the result is a number.  */
1876 static void
1877 expand_errno_check (tree exp, rtx target)
1879 rtx lab = gen_label_rtx ();
1881 /* Test the result; if it is NaN, set errno=EDOM because
1882 the argument was not in the domain. */
1883 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1884 NULL_RTX, NULL_RTX, lab);
1886 #ifdef TARGET_EDOM
1887 /* If this built-in doesn't throw an exception, set errno directly. */
1888 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
/* Use the target's errno location when it provides one; otherwise
   reference the `errno' symbol directly.  */
1890 #ifdef GEN_ERRNO_RTX
1891 rtx errno_rtx = GEN_ERRNO_RTX;
1892 #else
1893 rtx errno_rtx
1894 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1895 #endif
1896 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1897 emit_label (lab);
1898 return;
1900 #endif
1902 /* Make sure the library call isn't expanded as a tail call. */
1903 CALL_EXPR_TAILCALL (exp) = 0;
1905 /* We can't set errno=EDOM directly; let the library call do it.
1906 Pop the arguments right away in case the call gets deleted. */
1907 NO_DEFER_POP;
1908 expand_call (exp, target, 0);
1909 OK_DEFER_POP;
1910 emit_label (lab);
1913 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1914 Return NULL_RTX if a normal call should be emitted rather than expanding
1915 the function in-line. EXP is the expression that is a call to the builtin
1916 function; if convenient, the result should be placed in TARGET.
1917 SUBTARGET may be used as the target for computing one of EXP's operands. */
1919 static rtx
1920 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1922 optab builtin_optab;
1923 rtx op0, insns;
1924 tree fndecl = get_callee_fndecl (exp);
1925 enum machine_mode mode;
1926 bool errno_set = false;
1927 tree arg;
1929 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1930 return NULL_RTX;
1932 arg = CALL_EXPR_ARG (exp, 0);
1934 switch (DECL_FUNCTION_CODE (fndecl))
1936 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets EDOM for negative arguments, so the errno check
   can be dropped when ARG is provably non-negative.  */
1937 errno_set = ! tree_expr_nonnegative_p (arg);
1938 builtin_optab = sqrt_optab;
1939 break;
1940 CASE_FLT_FN (BUILT_IN_EXP):
1941 errno_set = true; builtin_optab = exp_optab; break;
1942 CASE_FLT_FN (BUILT_IN_EXP10):
1943 CASE_FLT_FN (BUILT_IN_POW10):
1944 errno_set = true; builtin_optab = exp10_optab; break;
1945 CASE_FLT_FN (BUILT_IN_EXP2):
1946 errno_set = true; builtin_optab = exp2_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXPM1):
1948 errno_set = true; builtin_optab = expm1_optab; break;
1949 CASE_FLT_FN (BUILT_IN_LOGB):
1950 errno_set = true; builtin_optab = logb_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOG):
1952 errno_set = true; builtin_optab = log_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG10):
1954 errno_set = true; builtin_optab = log10_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG2):
1956 errno_set = true; builtin_optab = log2_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG1P):
1958 errno_set = true; builtin_optab = log1p_optab; break;
1959 CASE_FLT_FN (BUILT_IN_ASIN):
1960 builtin_optab = asin_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ACOS):
1962 builtin_optab = acos_optab; break;
1963 CASE_FLT_FN (BUILT_IN_TAN):
1964 builtin_optab = tan_optab; break;
1965 CASE_FLT_FN (BUILT_IN_ATAN):
1966 builtin_optab = atan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_FLOOR):
1968 builtin_optab = floor_optab; break;
1969 CASE_FLT_FN (BUILT_IN_CEIL):
1970 builtin_optab = ceil_optab; break;
1971 CASE_FLT_FN (BUILT_IN_TRUNC):
1972 builtin_optab = btrunc_optab; break;
1973 CASE_FLT_FN (BUILT_IN_ROUND):
1974 builtin_optab = round_optab; break;
1975 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1976 builtin_optab = nearbyint_optab;
1977 if (flag_trapping_math)
1978 break;
1979 /* Else fallthrough and expand as rint. */
1980 CASE_FLT_FN (BUILT_IN_RINT):
1981 builtin_optab = rint_optab; break;
1982 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1983 builtin_optab = significand_optab; break;
1984 default:
1985 gcc_unreachable ();
1988 /* Make a suitable register to place result in. */
1989 mode = TYPE_MODE (TREE_TYPE (exp));
/* Errno handling is only needed with -fmath-errno on targets where
   NaNs are honored; otherwise skip the check entirely.  */
1991 if (! flag_errno_math || ! HONOR_NANS (mode))
1992 errno_set = false;
1994 /* Before working hard, check whether the instruction is available. */
1995 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1997 target = gen_reg_rtx (mode);
1999 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2000 need to expand the argument again. This way, we will not perform
2001 side-effects more than once. */
2002 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2004 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2006 start_sequence ();
2008 /* Compute into TARGET.
2009 Set TARGET to wherever the result comes back. */
2010 target = expand_unop (mode, builtin_optab, op0, target, 0);
2012 if (target != 0)
2014 if (errno_set)
2015 expand_errno_check (exp, target);
2017 /* Output the entire sequence. */
2018 insns = get_insns ();
2019 end_sequence ();
2020 emit_insn (insns);
2021 return target;
2024 /* If we were unable to expand via the builtin, stop the sequence
2025 (without outputting the insns) and call to the library function
2026 with the stabilized argument list. */
2027 end_sequence ();
2030 return expand_call (exp, target, target == const0_rtx);
2033 /* Expand a call to the builtin binary math functions (pow and atan2).
2034 Return NULL_RTX if a normal call should be emitted rather than expanding the
2035 function in-line. EXP is the expression that is a call to the builtin
2036 function; if convenient, the result should be placed in TARGET.
2037 SUBTARGET may be used as the target for computing one of EXP's
2038 operands. */
2040 static rtx
2041 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2043 optab builtin_optab;
2044 rtx op0, op1, insns;
2045 int op1_type = REAL_TYPE;
2046 tree fndecl = get_callee_fndecl (exp);
2047 tree arg0, arg1;
2048 enum machine_mode mode;
2049 bool errno_set = true;
/* scalbn/scalbln/ldexp take an integer second argument; every other
   function here takes two REAL_TYPE arguments.  */
2051 switch (DECL_FUNCTION_CODE (fndecl))
2053 CASE_FLT_FN (BUILT_IN_SCALBN):
2054 CASE_FLT_FN (BUILT_IN_SCALBLN):
2055 CASE_FLT_FN (BUILT_IN_LDEXP):
2056 op1_type = INTEGER_TYPE;
/* Falls through into the default break.  */
2057 default:
2058 break;
2061 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2062 return NULL_RTX;
2064 arg0 = CALL_EXPR_ARG (exp, 0);
2065 arg1 = CALL_EXPR_ARG (exp, 1);
2067 switch (DECL_FUNCTION_CODE (fndecl))
2069 CASE_FLT_FN (BUILT_IN_POW):
2070 builtin_optab = pow_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ATAN2):
2072 builtin_optab = atan2_optab; break;
2073 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb/scalbn/scalbln are only expanded inline when the float
   format's radix is 2, i.e. when they match ldexp semantics.  */
2074 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2075 return 0;
2076 builtin_optab = scalb_optab; break;
2077 CASE_FLT_FN (BUILT_IN_SCALBN):
2078 CASE_FLT_FN (BUILT_IN_SCALBLN):
2079 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2080 return 0;
2081 /* Fall through... */
2082 CASE_FLT_FN (BUILT_IN_LDEXP):
2083 builtin_optab = ldexp_optab; break;
2084 CASE_FLT_FN (BUILT_IN_FMOD):
2085 builtin_optab = fmod_optab; break;
2086 CASE_FLT_FN (BUILT_IN_REMAINDER):
2087 CASE_FLT_FN (BUILT_IN_DREM):
2088 builtin_optab = remainder_optab; break;
2089 default:
2090 gcc_unreachable ();
2093 /* Make a suitable register to place result in. */
2094 mode = TYPE_MODE (TREE_TYPE (exp));
2096 /* Before working hard, check whether the instruction is available. */
2097 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2098 return NULL_RTX;
2100 target = gen_reg_rtx (mode);
2102 if (! flag_errno_math || ! HONOR_NANS (mode))
2103 errno_set = false;
2105 /* Always stabilize the argument list. */
2106 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2107 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2109 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2110 op1 = expand_normal (arg1);
2112 start_sequence ();
2114 /* Compute into TARGET.
2115 Set TARGET to wherever the result comes back. */
2116 target = expand_binop (mode, builtin_optab, op0, op1,
2117 target, 0, OPTAB_DIRECT);
2119 /* If we were unable to expand via the builtin, stop the sequence
2120 (without outputting the insns) and call to the library function
2121 with the stabilized argument list. */
2122 if (target == 0)
2124 end_sequence ();
2125 return expand_call (exp, target, target == const0_rtx);
2128 if (errno_set)
2129 expand_errno_check (exp, target);
2131 /* Output the entire sequence. */
2132 insns = get_insns ();
2133 end_sequence ();
2134 emit_insn (insns);
2136 return target;
2139 /* Expand a call to the builtin sin and cos math functions.
2140 Return NULL_RTX if a normal call should be emitted rather than expanding the
2141 function in-line. EXP is the expression that is a call to the builtin
2142 function; if convenient, the result should be placed in TARGET.
2143 SUBTARGET may be used as the target for computing one of EXP's
2144 operands. */
2146 static rtx
2147 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2149 optab builtin_optab;
2150 rtx op0, insns;
2151 tree fndecl = get_callee_fndecl (exp);
2152 enum machine_mode mode;
2153 tree arg;
2155 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2156 return NULL_RTX;
2158 arg = CALL_EXPR_ARG (exp, 0);
2160 switch (DECL_FUNCTION_CODE (fndecl))
2162 CASE_FLT_FN (BUILT_IN_SIN):
2163 CASE_FLT_FN (BUILT_IN_COS):
2164 builtin_optab = sincos_optab; break;
2165 default:
2166 gcc_unreachable ();
2169 /* Make a suitable register to place result in. */
2170 mode = TYPE_MODE (TREE_TYPE (exp));
2172 /* Check if sincos insn is available, otherwise fallback
2173 to sin or cos insn. */
2174 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2175 switch (DECL_FUNCTION_CODE (fndecl))
2177 CASE_FLT_FN (BUILT_IN_SIN):
2178 builtin_optab = sin_optab; break;
2179 CASE_FLT_FN (BUILT_IN_COS):
2180 builtin_optab = cos_optab; break;
2181 default:
2182 gcc_unreachable ();
2185 /* Before working hard, check whether the instruction is available. */
2186 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2188 target = gen_reg_rtx (mode);
2190 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2191 need to expand the argument again. This way, we will not perform
2192 side-effects more than once. */
2193 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2195 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2197 start_sequence ();
2199 /* Compute into TARGET.
2200 Set TARGET to wherever the result comes back. */
2201 if (builtin_optab == sincos_optab)
2203 int result;
/* The sincos optab produces two values; request only the one we
   need: sin goes into the second output slot, cos into the first.  */
2205 switch (DECL_FUNCTION_CODE (fndecl))
2207 CASE_FLT_FN (BUILT_IN_SIN):
2208 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2209 break;
2210 CASE_FLT_FN (BUILT_IN_COS):
2211 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2212 break;
2213 default:
2214 gcc_unreachable ();
2216 gcc_assert (result);
2218 else
2220 target = expand_unop (mode, builtin_optab, op0, target, 0);
2223 if (target != 0)
2225 /* Output the entire sequence. */
2226 insns = get_insns ();
2227 end_sequence ();
2228 emit_insn (insns);
2229 return target;
2232 /* If we were unable to expand via the builtin, stop the sequence
2233 (without outputting the insns) and call to the library function
2234 with the stabilized argument list. */
2235 end_sequence ();
2238 target = expand_call (exp, target, target == const0_rtx);
2240 return target;
2243 /* Given an interclass math builtin decl FNDECL and its argument ARG
2244 return an RTL instruction code that implements the functionality.
2245 If that isn't possible or available return CODE_FOR_nothing. */
2247 static enum insn_code
2248 interclass_mathfn_icode (tree arg, tree fndecl)
2250 bool errno_set = false;
2251 optab builtin_optab = 0;
2252 enum machine_mode mode;
2254 switch (DECL_FUNCTION_CODE (fndecl))
2256 CASE_FLT_FN (BUILT_IN_ILOGB):
2257 errno_set = true; builtin_optab = ilogb_optab; break;
2258 CASE_FLT_FN (BUILT_IN_ISINF):
2259 builtin_optab = isinf_optab; break;
2260 case BUILT_IN_ISNORMAL:
2261 case BUILT_IN_ISFINITE:
2262 CASE_FLT_FN (BUILT_IN_FINITE):
2263 case BUILT_IN_FINITED32:
2264 case BUILT_IN_FINITED64:
2265 case BUILT_IN_FINITED128:
2266 case BUILT_IN_ISINFD32:
2267 case BUILT_IN_ISINFD64:
2268 case BUILT_IN_ISINFD128:
2269 /* These builtins have no optabs (yet). */
2270 break;
2271 default:
2272 gcc_unreachable ();
2275 /* There's no easy way to detect the case we need to set EDOM. */
/* ilogb may need to set errno; with -fmath-errno we must punt to
   the library call.  */
2276 if (flag_errno_math && errno_set)
2277 return CODE_FOR_nothing;
2279 /* Optab mode depends on the mode of the input argument. */
2280 mode = TYPE_MODE (TREE_TYPE (arg));
2282 if (builtin_optab)
2283 return optab_handler (builtin_optab, mode)->insn_code;
2284 return CODE_FOR_nothing;
2287 /* Expand a call to one of the builtin math functions that operate on
2288 floating point argument and output an integer result (ilogb, isinf,
2289 isnan, etc).
2290 Return 0 if a normal call should be emitted rather than expanding the
2291 function in-line. EXP is the expression that is a call to the builtin
2292 function; if convenient, the result should be placed in TARGET.
2293 SUBTARGET may be used as the target for computing one of EXP's operands. */
2295 static rtx
2296 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2298 enum insn_code icode = CODE_FOR_nothing;
2299 rtx op0;
2300 tree fndecl = get_callee_fndecl (exp);
2301 enum machine_mode mode;
2302 tree arg;
2304 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2305 return NULL_RTX;
2307 arg = CALL_EXPR_ARG (exp, 0);
2308 icode = interclass_mathfn_icode (arg, fndecl);
/* MODE is the mode of the floating-point argument; the result has
   the (integer) mode of the call's type.  */
2309 mode = TYPE_MODE (TREE_TYPE (arg));
2311 if (icode != CODE_FOR_nothing)
2313 /* Make a suitable register to place result in. */
2314 if (!target
2315 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2316 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2318 gcc_assert (insn_data[icode].operand[0].predicate
2319 (target, GET_MODE (target)));
2321 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2322 need to expand the argument again. This way, we will not perform
2323 side-effects more than once. */
2324 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2326 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2328 if (mode != GET_MODE (op0))
2329 op0 = convert_to_mode (mode, op0, 0);
2331 /* Compute into TARGET.
2332 Set TARGET to wherever the result comes back. */
2333 emit_unop_insn (icode, target, op0, UNKNOWN);
2334 return target;
2337 return NULL_RTX;
2340 /* Expand a call to the builtin sincos math function.
2341 Return NULL_RTX if a normal call should be emitted rather than expanding the
2342 function in-line. EXP is the expression that is a call to the builtin
2343 function. */
2345 static rtx
2346 expand_builtin_sincos (tree exp)
2348 rtx op0, op1, op2, target1, target2;
2349 enum machine_mode mode;
2350 tree arg, sinp, cosp;
2351 int result;
2352 location_t loc = EXPR_LOCATION (exp);
2354 if (!validate_arglist (exp, REAL_TYPE,
2355 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2356 return NULL_RTX;
2358 arg = CALL_EXPR_ARG (exp, 0);
2359 sinp = CALL_EXPR_ARG (exp, 1);
2360 cosp = CALL_EXPR_ARG (exp, 2);
2362 /* Make a suitable register to place result in. */
2363 mode = TYPE_MODE (TREE_TYPE (arg));
2365 /* Check if sincos insn is available, otherwise emit the call. */
2366 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2367 return NULL_RTX;
2369 target1 = gen_reg_rtx (mode);
2370 target2 = gen_reg_rtx (mode);
2372 op0 = expand_normal (arg);
/* op1/op2 are the dereferenced *sinp and *cosp destinations.  */
2373 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2374 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2376 /* Compute into target1 and target2.
2377 Set TARGET to wherever the result comes back. */
/* target2 is the optab's first output (cos), target1 the second
   (sin); cf. the slot usage in expand_builtin_mathfn_3.  */
2378 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2379 gcc_assert (result);
2381 /* Move target1 and target2 to the memory locations indicated
2382 by op1 and op2. */
2383 emit_move_insn (op1, target1);
2384 emit_move_insn (op2, target2);
/* sincos returns void; const0_rtx just signals success.  */
2386 return const0_rtx;
2389 /* Expand a call to the internal cexpi builtin to the sincos math function.
2390 EXP is the expression that is a call to the builtin function; if convenient,
2391 the result should be placed in TARGET. SUBTARGET may be used as the target
2392 for computing one of EXP's operands. */
2394 static rtx
2395 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2397 tree fndecl = get_callee_fndecl (exp);
2398 tree arg, type;
2399 enum machine_mode mode;
2400 rtx op0, op1, op2;
2401 location_t loc = EXPR_LOCATION (exp);
2403 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2404 return NULL_RTX;
2406 arg = CALL_EXPR_ARG (exp, 0);
2407 type = TREE_TYPE (arg);
2408 mode = TYPE_MODE (TREE_TYPE (arg));
2410 /* Try expanding via a sincos optab, fall back to emitting a libcall
2411 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2412 is only generated from sincos, cexp or if we have either of them. */
2413 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
/* Strategy 1: direct sincos insn.  op2 gets the first output
   (cos), op1 the second (sin).  */
2415 op1 = gen_reg_rtx (mode);
2416 op2 = gen_reg_rtx (mode);
2418 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2420 /* Compute into op1 and op2. */
2421 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2423 else if (TARGET_HAS_SINCOS)
/* Strategy 2: call the sincos library function with the addresses
   of two stack temporaries.  */
2425 tree call, fn = NULL_TREE;
2426 tree top1, top2;
2427 rtx op1a, op2a;
2429 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2430 fn = built_in_decls[BUILT_IN_SINCOSF];
2431 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2432 fn = built_in_decls[BUILT_IN_SINCOS];
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2434 fn = built_in_decls[BUILT_IN_SINCOSL];
2435 else
2436 gcc_unreachable ();
2438 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2439 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2440 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2441 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2442 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2443 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2445 /* Make sure not to fold the sincos call again. */
2446 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2447 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2448 call, 3, arg, top1, top2));
2450 else
/* Strategy 3: compute cexp(0 + arg*i) via the complex library.  */
2452 tree call, fn = NULL_TREE, narg;
2453 tree ctype = build_complex_type (type);
2455 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2456 fn = built_in_decls[BUILT_IN_CEXPF];
2457 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2458 fn = built_in_decls[BUILT_IN_CEXP];
2459 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2460 fn = built_in_decls[BUILT_IN_CEXPL];
2461 else
2462 gcc_unreachable ();
2464 /* If we don't have a decl for cexp create one. This is the
2465 friendliest fallback if the user calls __builtin_cexpi
2466 without full target C99 function support. */
2467 if (fn == NULL_TREE)
2469 tree fntype;
2470 const char *name = NULL;
2472 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2473 name = "cexpf";
2474 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2475 name = "cexp";
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2477 name = "cexpl";
/* NAME cannot stay NULL: the function code was already validated
   by the gcc_unreachable above.  */
2479 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2480 fn = build_fn_decl (name, fntype);
2483 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2484 build_real (type, dconst0), arg);
2486 /* Make sure not to fold the cexp call again. */
2487 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2488 return expand_expr (build_call_nary (ctype, call, 1, narg),
2489 target, VOIDmode, EXPAND_NORMAL);
2492 /* Now build the proper return type. */
/* cexpi(x) = cos(x) + i*sin(x): real part from op2, imaginary
   part from op1.  */
2493 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2494 make_tree (TREE_TYPE (arg), op2),
2495 make_tree (TREE_TYPE (arg), op1)),
2496 target, VOIDmode, EXPAND_NORMAL);
2499 /* Conveniently construct a function call expression. FNDECL names the
2500 function to be called, N is the number of arguments, and the "..."
2501 parameters are the argument expressions. Unlike build_call_expr
2502 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2504 static tree
2505 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2507 va_list ap;
2508 tree fntype = TREE_TYPE (fndecl);
2509 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2511 va_start (ap, n);
2512 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2513 va_end (ap);
2514 SET_EXPR_LOCATION (fn, loc);
2515 return fn;
/* Convenience wrapper for callers with no meaningful source location.  */
2517 #define build_call_nofold(...) \
2518 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2520 /* Expand a call to one of the builtin rounding functions gcc defines
2521 as an extension (lfloor and lceil). As these are gcc extensions we
2522 do not need to worry about setting errno to EDOM.
2523 If expanding via optab fails, lower expression to (int)(floor(x)).
2524 EXP is the expression that is a call to the builtin function;
2525 if convenient, the result should be placed in TARGET. */
2527 static rtx
2528 expand_builtin_int_roundingfn (tree exp, rtx target)
2530 convert_optab builtin_optab;
2531 rtx op0, insns, tmp;
2532 tree fndecl = get_callee_fndecl (exp);
2533 enum built_in_function fallback_fn;
2534 tree fallback_fndecl;
2535 enum machine_mode mode;
2536 tree arg;
2538 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2539 gcc_unreachable ();
2541 arg = CALL_EXPR_ARG (exp, 0);
2543 switch (DECL_FUNCTION_CODE (fndecl))
2545 CASE_FLT_FN (BUILT_IN_LCEIL):
2546 CASE_FLT_FN (BUILT_IN_LLCEIL):
2547 builtin_optab = lceil_optab;
2548 fallback_fn = BUILT_IN_CEIL;
2549 break;
2551 CASE_FLT_FN (BUILT_IN_LFLOOR):
2552 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2553 builtin_optab = lfloor_optab;
2554 fallback_fn = BUILT_IN_FLOOR;
2555 break;
2557 default:
2558 gcc_unreachable ();
2561 /* Make a suitable register to place result in. */
2562 mode = TYPE_MODE (TREE_TYPE (exp));
2564 target = gen_reg_rtx (mode);
2566 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2567 need to expand the argument again. This way, we will not perform
2568 side-effects more than once. */
2569 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2571 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2573 start_sequence ();
2575 /* Compute into TARGET. */
2576 if (expand_sfix_optab (target, op0, builtin_optab))
2578 /* Output the entire sequence. */
2579 insns = get_insns ();
2580 end_sequence ();
2581 emit_insn (insns);
2582 return target;
2585 /* If we were unable to expand via the builtin, stop the sequence
2586 (without outputting the insns). */
2587 end_sequence ();
2589 /* Fall back to floating point rounding optab. */
2590 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2592 /* For non-C99 targets we may end up without a fallback fndecl here
2593 if the user called __builtin_lfloor directly. In this case emit
2594 a call to the floor/ceil variants nevertheless. This should result
2595 in the best user experience for not full C99 targets. */
2596 if (fallback_fndecl == NULL_TREE)
2598 tree fntype;
2599 const char *name = NULL;
2601 switch (DECL_FUNCTION_CODE (fndecl))
2603 case BUILT_IN_LCEIL:
2604 case BUILT_IN_LLCEIL:
2605 name = "ceil";
2606 break;
2607 case BUILT_IN_LCEILF:
2608 case BUILT_IN_LLCEILF:
2609 name = "ceilf";
2610 break;
2611 case BUILT_IN_LCEILL:
2612 case BUILT_IN_LLCEILL:
2613 name = "ceill";
2614 break;
2615 case BUILT_IN_LFLOOR:
2616 case BUILT_IN_LLFLOOR:
2617 name = "floor";
2618 break;
2619 case BUILT_IN_LFLOORF:
2620 case BUILT_IN_LLFLOORF:
2621 name = "floorf";
2622 break;
2623 case BUILT_IN_LFLOORL:
2624 case BUILT_IN_LLFLOORL:
2625 name = "floorl";
2626 break;
2627 default:
2628 gcc_unreachable ();
2631 fntype = build_function_type_list (TREE_TYPE (arg),
2632 TREE_TYPE (arg), NULL_TREE);
2633 fallback_fndecl = build_fn_decl (name, fntype);
/* Lower l{l}floor/l{l}ceil(x) to (int-type)floor/ceil(x).  */
2636 exp = build_call_nofold (fallback_fndecl, 1, arg);
2638 tmp = expand_normal (exp);
2640 /* Truncate the result of floating point optab to integer
2641 via expand_fix (). */
2642 target = gen_reg_rtx (mode);
2643 expand_fix (target, tmp, 0);
2645 return target;
2648 /* Expand a call to one of the builtin math functions doing integer
2649 conversion (lrint).
2650 Return 0 if a normal call should be emitted rather than expanding the
2651 function in-line. EXP is the expression that is a call to the builtin
2652 function; if convenient, the result should be placed in TARGET. */
2654 static rtx
2655 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2657 convert_optab builtin_optab;
2658 rtx op0, insns;
2659 tree fndecl = get_callee_fndecl (exp);
2660 tree arg;
2661 enum machine_mode mode;
2663 /* There's no easy way to detect the case we need to set EDOM. */
/* lrint/lround set errno on overflow; with -fmath-errno we must
   leave the libcall in place.  */
2664 if (flag_errno_math)
2665 return NULL_RTX;
2667 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2668 gcc_unreachable ();
2670 arg = CALL_EXPR_ARG (exp, 0);
2672 switch (DECL_FUNCTION_CODE (fndecl))
2674 CASE_FLT_FN (BUILT_IN_LRINT):
2675 CASE_FLT_FN (BUILT_IN_LLRINT):
2676 builtin_optab = lrint_optab; break;
2677 CASE_FLT_FN (BUILT_IN_LROUND):
2678 CASE_FLT_FN (BUILT_IN_LLROUND):
2679 builtin_optab = lround_optab; break;
2680 default:
2681 gcc_unreachable ();
2684 /* Make a suitable register to place result in. */
2685 mode = TYPE_MODE (TREE_TYPE (exp));
2687 target = gen_reg_rtx (mode);
2689 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2690 need to expand the argument again. This way, we will not perform
2691 side-effects more than once. */
2692 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2694 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2696 start_sequence ();
2698 if (expand_sfix_optab (target, op0, builtin_optab))
2700 /* Output the entire sequence. */
2701 insns = get_insns ();
2702 end_sequence ();
2703 emit_insn (insns);
2704 return target;
2707 /* If we were unable to expand via the builtin, stop the sequence
2708 (without outputting the insns) and call to the library function
2709 with the stabilized argument list. */
2710 end_sequence ();
2712 target = expand_call (exp, target, target == const0_rtx);
2714 return target;
2717 /* To evaluate powi(x,n), the floating point value x raised to the
2718 constant integer exponent n, we use a hybrid algorithm that
2719 combines the "window method" with look-up tables. For an
2720 introduction to exponentiation algorithms and "addition chains",
2721 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2722 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2723 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2724 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2726 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2727 multiplications to inline before calling the system library's pow
2728 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2729 so this default never requires calling pow, powf or powl. */
2731 #ifndef POWI_MAX_MULTS
2732 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2733 #endif
2735 /* The size of the "optimal power tree" lookup table. All
2736 exponents less than this value are simply looked up in the
2737 powi_table below. This threshold is also used to size the
2738 cache of pseudo registers that hold intermediate results. */
2739 #define POWI_TABLE_SIZE 256
2741 /* The size, in bits of the window, used in the "window method"
2742 exponentiation algorithm. This is equivalent to a radix of
2743 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2744 #define POWI_WINDOW_SIZE 3
2746 /* The following table is an efficient representation of an
2747 "optimal power tree". For each value, i, the corresponding
2749 value, j, in the table states that an optimal evaluation
2749 sequence for calculating pow(x,i) can be found by evaluating
2750 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2751 100 integers is given in Knuth's "Seminumerical algorithms". */
2753 static const unsigned char powi_table[POWI_TABLE_SIZE] =
/* powi_table[i] is the split point j: pow(x,i) is best computed as
   pow(x,j) * pow(x,i-j).  Eight entries per row; the trailing
   comments give the index range of each row.  */
2755 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2756 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2757 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2758 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2759 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2760 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2761 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2762 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2763 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2764 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2765 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2766 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2767 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2768 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2769 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2770 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2771 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2772 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2773 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2774 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2775 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2776 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2777 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2778 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2779 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2780 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2781 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2782 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2783 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2784 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2785 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2786 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2790 /* Return the number of multiplications required to calculate
2791 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2792 subroutine of powi_cost. CACHE is an array indicating
2793 which exponents have already been calculated. */
2795 static int
2796 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2798 /* If we've already calculated this exponent, then this evaluation
2799 doesn't require any additional multiplications. */
2800 if (cache[n])
2801 return 0;
2803 cache[n] = true;
/* One extra multiply combines the two recursively-costed factors
   pow(x, powi_table[n]) and pow(x, n - powi_table[n]).  */
2804 return powi_lookup_cost (n - powi_table[n], cache)
2805 + powi_lookup_cost (powi_table[n], cache) + 1;
2808 /* Return the number of multiplications required to calculate
2809 powi(x,n) for an arbitrary x, given the exponent N. This
2810 function needs to be kept in sync with expand_powi below. */
2812 static int
2813 powi_cost (HOST_WIDE_INT n)
2815 bool cache[POWI_TABLE_SIZE];
2816 unsigned HOST_WIDE_INT digit;
2817 unsigned HOST_WIDE_INT val;
2818 int result;
2820 if (n == 0)
2821 return 0;
2823 /* Ignore the reciprocal when calculating the cost. */
2824 val = (n < 0) ? -n : n;
2826 /* Initialize the exponent cache. */
2827 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2828 cache[1] = true;
2830 result = 0;
2832 while (val >= POWI_TABLE_SIZE)
2834 if (val & 1)
2836 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2837 result += powi_lookup_cost (digit, cache)
2838 + POWI_WINDOW_SIZE + 1;
2839 val >>= POWI_WINDOW_SIZE;
2841 else
2843 val >>= 1;
2844 result++;
2848 return result + powi_lookup_cost (val, cache);
2851 /* Recursive subroutine of expand_powi. This function takes the array,
2852 CACHE, of already calculated exponents and an exponent N and returns
2853 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2855 static rtx
2856 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2858 unsigned HOST_WIDE_INT digit;
2859 rtx target, result;
2860 rtx op0, op1;
2862 if (n < POWI_TABLE_SIZE)
2864 if (cache[n])
2865 return cache[n];
/* Record the result register before recursing so shared
   subexponents are reused instead of recomputed.  */
2867 target = gen_reg_rtx (mode);
2868 cache[n] = target;
2870 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2871 op1 = expand_powi_1 (mode, powi_table[n], cache);
2873 else if (n & 1)
/* Odd large exponent: split off the low POWI_WINDOW_SIZE bits so
   the remainder is a multiple of the window.  */
2875 target = gen_reg_rtx (mode);
2876 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2877 op0 = expand_powi_1 (mode, n - digit, cache);
2878 op1 = expand_powi_1 (mode, digit, cache);
2880 else
/* Even large exponent: square the half power.  */
2882 target = gen_reg_rtx (mode);
2883 op0 = expand_powi_1 (mode, n >> 1, cache);
2884 op1 = op0;
2887 result = expand_mult (mode, op0, op1, target, 0);
2888 if (result != target)
2889 emit_move_insn (target, result);
2890 return target;
2893 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2894 floating point operand in mode MODE, and N is the exponent. This
2895 function needs to be kept in sync with powi_cost above. */
2897 static rtx
2898 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2900 rtx cache[POWI_TABLE_SIZE];
2901 rtx result;
2903 if (n == 0)
2904 return CONST1_RTX (mode);
2906 memset (cache, 0, sizeof (cache));
2907 cache[1] = x;
2909 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2911 /* If the original exponent was negative, reciprocate the result. */
2912 if (n < 0)
2913 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2914 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2916 return result;
2919 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2920 a normal call should be emitted rather than expanding the function
2921 in-line. EXP is the expression that is a call to the builtin
2922 function; if convenient, the result should be placed in TARGET. */
2924 static rtx
2925 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2927 tree arg0, arg1;
2928 tree fn, narg0;
2929 tree type = TREE_TYPE (exp);
2930 REAL_VALUE_TYPE cint, c, c2;
2931 HOST_WIDE_INT n;
2932 rtx op, op2;
2933 enum machine_mode mode = TYPE_MODE (type);
2935 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2936 return NULL_RTX;
2938 arg0 = CALL_EXPR_ARG (exp, 0);
2939 arg1 = CALL_EXPR_ARG (exp, 1);
2941 if (TREE_CODE (arg1) != REAL_CST
2942 || TREE_OVERFLOW (arg1))
2943 return expand_builtin_mathfn_2 (exp, target, subtarget);
2945 /* Handle constant exponents. */
2947 /* For integer valued exponents we can expand to an optimal multiplication
2948 sequence using expand_powi. */
2949 c = TREE_REAL_CST (arg1);
2950 n = real_to_integer (&c);
2951 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2952 if (real_identical (&c, &cint)
2953 && ((n >= -1 && n <= 2)
2954 || (flag_unsafe_math_optimizations
2955 && optimize_insn_for_speed_p ()
2956 && powi_cost (n) <= POWI_MAX_MULTS)))
2958 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2959 if (n != 1)
2961 op = force_reg (mode, op);
2962 op = expand_powi (op, mode, n);
2964 return op;
2967 narg0 = builtin_save_expr (arg0);
2969 /* If the exponent is not integer valued, check if it is half of an integer.
2970 In this case we can expand to sqrt (x) * x**(n/2). */
2971 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2972 if (fn != NULL_TREE)
2974 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2975 n = real_to_integer (&c2);
2976 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2977 if (real_identical (&c2, &cint)
2978 && ((flag_unsafe_math_optimizations
2979 && optimize_insn_for_speed_p ()
2980 && powi_cost (n/2) <= POWI_MAX_MULTS)
2981 || n == 1))
2983 tree call_expr = build_call_nofold (fn, 1, narg0);
2984 /* Use expand_expr in case the newly built call expression
2985 was folded to a non-call. */
2986 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2987 if (n != 1)
2989 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2990 op2 = force_reg (mode, op2);
2991 op2 = expand_powi (op2, mode, abs (n / 2));
2992 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2993 0, OPTAB_LIB_WIDEN);
2994 /* If the original exponent was negative, reciprocate the
2995 result. */
2996 if (n < 0)
2997 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2998 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3000 return op;
3004 /* Try if the exponent is a third of an integer. In this case
3005 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3006 different from pow (x, 1./3.) due to rounding and behavior
3007 with negative x we need to constrain this transformation to
3008 unsafe math and positive x or finite math. */
3009 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3010 if (fn != NULL_TREE
3011 && flag_unsafe_math_optimizations
3012 && (tree_expr_nonnegative_p (arg0)
3013 || !HONOR_NANS (mode)))
3015 REAL_VALUE_TYPE dconst3;
3016 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3017 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3018 real_round (&c2, mode, &c2);
3019 n = real_to_integer (&c2);
3020 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3021 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3022 real_convert (&c2, mode, &c2);
3023 if (real_identical (&c2, &c)
3024 && ((optimize_insn_for_speed_p ()
3025 && powi_cost (n/3) <= POWI_MAX_MULTS)
3026 || n == 1))
3028 tree call_expr = build_call_nofold (fn, 1,narg0);
3029 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3030 if (abs (n) % 3 == 2)
3031 op = expand_simple_binop (mode, MULT, op, op, op,
3032 0, OPTAB_LIB_WIDEN);
3033 if (n != 1)
3035 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3036 op2 = force_reg (mode, op2);
3037 op2 = expand_powi (op2, mode, abs (n / 3));
3038 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3039 0, OPTAB_LIB_WIDEN);
3040 /* If the original exponent was negative, reciprocate the
3041 result. */
3042 if (n < 0)
3043 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3044 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3046 return op;
3050 /* Fall back to optab expansion. */
3051 return expand_builtin_mathfn_2 (exp, target, subtarget);
3054 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3055 a normal call should be emitted rather than expanding the function
3056 in-line. EXP is the expression that is a call to the builtin
3057 function; if convenient, the result should be placed in TARGET. */
3059 static rtx
3060 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3062 tree arg0, arg1;
3063 rtx op0, op1;
3064 enum machine_mode mode;
3065 enum machine_mode mode2;
3067 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3068 return NULL_RTX;
3070 arg0 = CALL_EXPR_ARG (exp, 0);
3071 arg1 = CALL_EXPR_ARG (exp, 1);
3072 mode = TYPE_MODE (TREE_TYPE (exp));
3074 /* Handle constant power. */
3076 if (TREE_CODE (arg1) == INTEGER_CST
3077 && !TREE_OVERFLOW (arg1))
3079 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3081 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3082 Otherwise, check the number of multiplications required. */
3083 if ((TREE_INT_CST_HIGH (arg1) == 0
3084 || TREE_INT_CST_HIGH (arg1) == -1)
3085 && ((n >= -1 && n <= 2)
3086 || (optimize_insn_for_speed_p ()
3087 && powi_cost (n) <= POWI_MAX_MULTS)))
3089 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3090 op0 = force_reg (mode, op0);
3091 return expand_powi (op0, mode, n);
3095 /* Emit a libcall to libgcc. */
3097 /* Mode of the 2nd argument must match that of an int. */
3098 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3100 if (target == NULL_RTX)
3101 target = gen_reg_rtx (mode);
3103 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3104 if (GET_MODE (op0) != mode)
3105 op0 = convert_to_mode (mode, op0, 0);
3106 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3107 if (GET_MODE (op1) != mode2)
3108 op1 = convert_to_mode (mode2, op1, 0);
3110 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3111 target, LCT_CONST, mode, 2,
3112 op0, mode, op1, mode2);
3114 return target;
3117 /* Expand expression EXP which is a call to the strlen builtin. Return
3118 NULL_RTX if we failed the caller should emit a normal call, otherwise
3119 try to get the result in TARGET, if convenient. */
3121 static rtx
3122 expand_builtin_strlen (tree exp, rtx target,
3123 enum machine_mode target_mode)
3125 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3126 return NULL_RTX;
3127 else
3129 rtx pat;
3130 tree len;
3131 tree src = CALL_EXPR_ARG (exp, 0);
3132 rtx result, src_reg, char_rtx, before_strlen;
3133 enum machine_mode insn_mode = target_mode, char_mode;
3134 enum insn_code icode = CODE_FOR_nothing;
3135 int align;
3137 /* If the length can be computed at compile-time, return it. */
3138 len = c_strlen (src, 0);
3139 if (len)
3140 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3142 /* If the length can be computed at compile-time and is constant
3143 integer, but there are side-effects in src, evaluate
3144 src for side-effects, then return len.
3145 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3146 can be optimized into: i++; x = 3; */
3147 len = c_strlen (src, 1);
3148 if (len && TREE_CODE (len) == INTEGER_CST)
3150 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3151 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3154 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3156 /* If SRC is not a pointer type, don't do this operation inline. */
3157 if (align == 0)
3158 return NULL_RTX;
3160 /* Bail out if we can't compute strlen in the right mode. */
3161 while (insn_mode != VOIDmode)
3163 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3164 if (icode != CODE_FOR_nothing)
3165 break;
3167 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3169 if (insn_mode == VOIDmode)
3170 return NULL_RTX;
3172 /* Make a place to write the result of the instruction. */
3173 result = target;
3174 if (! (result != 0
3175 && REG_P (result)
3176 && GET_MODE (result) == insn_mode
3177 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3178 result = gen_reg_rtx (insn_mode);
3180 /* Make a place to hold the source address. We will not expand
3181 the actual source until we are sure that the expansion will
3182 not fail -- there are trees that cannot be expanded twice. */
3183 src_reg = gen_reg_rtx (Pmode);
3185 /* Mark the beginning of the strlen sequence so we can emit the
3186 source operand later. */
3187 before_strlen = get_last_insn ();
3189 char_rtx = const0_rtx;
3190 char_mode = insn_data[(int) icode].operand[2].mode;
3191 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3192 char_mode))
3193 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3195 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3196 char_rtx, GEN_INT (align));
3197 if (! pat)
3198 return NULL_RTX;
3199 emit_insn (pat);
3201 /* Now that we are assured of success, expand the source. */
3202 start_sequence ();
3203 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3204 if (pat != src_reg)
3205 emit_move_insn (src_reg, pat);
3206 pat = get_insns ();
3207 end_sequence ();
3209 if (before_strlen)
3210 emit_insn_after (pat, before_strlen);
3211 else
3212 emit_insn_before (pat, get_insns ());
3214 /* Return the value in the proper mode for this function. */
3215 if (GET_MODE (result) == target_mode)
3216 target = result;
3217 else if (target != 0)
3218 convert_move (target, result, 0);
3219 else
3220 target = convert_to_mode (target_mode, result, 0);
3222 return target;
3226 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3227 bytes from constant string DATA + OFFSET and return it as target
3228 constant. */
3230 static rtx
3231 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3232 enum machine_mode mode)
3234 const char *str = (const char *) data;
3236 gcc_assert (offset >= 0
3237 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3238 <= strlen (str) + 1));
3240 return c_readstr (str + offset, mode);
3243 /* Expand a call EXP to the memcpy builtin.
3244 Return NULL_RTX if we failed, the caller should emit a normal call,
3245 otherwise try to get the result in TARGET, if convenient (and in
3246 mode MODE if that's convenient). */
3248 static rtx
3249 expand_builtin_memcpy (tree exp, rtx target)
3251 if (!validate_arglist (exp,
3252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
3254 else
3256 tree dest = CALL_EXPR_ARG (exp, 0);
3257 tree src = CALL_EXPR_ARG (exp, 1);
3258 tree len = CALL_EXPR_ARG (exp, 2);
3259 const char *src_str;
3260 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3261 unsigned int dest_align
3262 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3263 rtx dest_mem, src_mem, dest_addr, len_rtx;
3264 HOST_WIDE_INT expected_size = -1;
3265 unsigned int expected_align = 0;
3267 /* If DEST is not a pointer type, call the normal function. */
3268 if (dest_align == 0)
3269 return NULL_RTX;
3271 /* If either SRC is not a pointer type, don't do this
3272 operation in-line. */
3273 if (src_align == 0)
3274 return NULL_RTX;
3276 if (currently_expanding_gimple_stmt)
3277 stringop_block_profile (currently_expanding_gimple_stmt,
3278 &expected_align, &expected_size);
3280 if (expected_align < dest_align)
3281 expected_align = dest_align;
3282 dest_mem = get_memory_rtx (dest, len);
3283 set_mem_align (dest_mem, dest_align);
3284 len_rtx = expand_normal (len);
3285 src_str = c_getstr (src);
3287 /* If SRC is a string constant and block move would be done
3288 by pieces, we can avoid loading the string from memory
3289 and only stored the computed constants. */
3290 if (src_str
3291 && CONST_INT_P (len_rtx)
3292 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3293 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3294 CONST_CAST (char *, src_str),
3295 dest_align, false))
3297 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3298 builtin_memcpy_read_str,
3299 CONST_CAST (char *, src_str),
3300 dest_align, false, 0);
3301 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3302 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3303 return dest_mem;
3306 src_mem = get_memory_rtx (src, len);
3307 set_mem_align (src_mem, src_align);
3309 /* Copy word part most expediently. */
3310 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3311 CALL_EXPR_TAILCALL (exp)
3312 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3313 expected_align, expected_size);
3315 if (dest_addr == 0)
3317 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3318 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3320 return dest_addr;
3324 /* Expand a call EXP to the mempcpy builtin.
3325 Return NULL_RTX if we failed; the caller should emit a normal call,
3326 otherwise try to get the result in TARGET, if convenient (and in
3327 mode MODE if that's convenient). If ENDP is 0 return the
3328 destination pointer, if ENDP is 1 return the end pointer ala
3329 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3330 stpcpy. */
3332 static rtx
3333 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3335 if (!validate_arglist (exp,
3336 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3337 return NULL_RTX;
3338 else
3340 tree dest = CALL_EXPR_ARG (exp, 0);
3341 tree src = CALL_EXPR_ARG (exp, 1);
3342 tree len = CALL_EXPR_ARG (exp, 2);
3343 return expand_builtin_mempcpy_args (dest, src, len,
3344 target, mode, /*endp=*/ 1);
3348 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3349 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3350 so that this can also be called without constructing an actual CALL_EXPR.
3351 The other arguments and return value are the same as for
3352 expand_builtin_mempcpy. */
3354 static rtx
3355 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3356 rtx target, enum machine_mode mode, int endp)
3358 /* If return value is ignored, transform mempcpy into memcpy. */
3359 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3361 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3362 tree result = build_call_nofold (fn, 3, dest, src, len);
3363 return expand_expr (result, target, mode, EXPAND_NORMAL);
3365 else
3367 const char *src_str;
3368 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3369 unsigned int dest_align
3370 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3371 rtx dest_mem, src_mem, len_rtx;
3373 /* If either SRC or DEST is not a pointer type, don't do this
3374 operation in-line. */
3375 if (dest_align == 0 || src_align == 0)
3376 return NULL_RTX;
3378 /* If LEN is not constant, call the normal function. */
3379 if (! host_integerp (len, 1))
3380 return NULL_RTX;
3382 len_rtx = expand_normal (len);
3383 src_str = c_getstr (src);
3385 /* If SRC is a string constant and block move would be done
3386 by pieces, we can avoid loading the string from memory
3387 and only stored the computed constants. */
3388 if (src_str
3389 && CONST_INT_P (len_rtx)
3390 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3391 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3392 CONST_CAST (char *, src_str),
3393 dest_align, false))
3395 dest_mem = get_memory_rtx (dest, len);
3396 set_mem_align (dest_mem, dest_align);
3397 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3398 builtin_memcpy_read_str,
3399 CONST_CAST (char *, src_str),
3400 dest_align, false, endp);
3401 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3402 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3403 return dest_mem;
3406 if (CONST_INT_P (len_rtx)
3407 && can_move_by_pieces (INTVAL (len_rtx),
3408 MIN (dest_align, src_align)))
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 src_mem = get_memory_rtx (src, len);
3413 set_mem_align (src_mem, src_align);
3414 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3415 MIN (dest_align, src_align), endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3421 return NULL_RTX;
3425 #ifndef HAVE_movstr
3426 # define HAVE_movstr 0
3427 # define CODE_FOR_movstr CODE_FOR_nothing
3428 #endif
3430 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3431 we failed, the caller should emit a normal call, otherwise try to
3432 get the result in TARGET, if convenient. If ENDP is 0 return the
3433 destination pointer, if ENDP is 1 return the end pointer ala
3434 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3435 stpcpy. */
3437 static rtx
3438 expand_movstr (tree dest, tree src, rtx target, int endp)
3440 rtx end;
3441 rtx dest_mem;
3442 rtx src_mem;
3443 rtx insn;
3444 const struct insn_data * data;
3446 if (!HAVE_movstr)
3447 return NULL_RTX;
3449 dest_mem = get_memory_rtx (dest, NULL);
3450 src_mem = get_memory_rtx (src, NULL);
3451 if (!endp)
3453 target = force_reg (Pmode, XEXP (dest_mem, 0));
3454 dest_mem = replace_equiv_address (dest_mem, target);
3455 end = gen_reg_rtx (Pmode);
3457 else
3459 if (target == 0 || target == const0_rtx)
3461 end = gen_reg_rtx (Pmode);
3462 if (target == 0)
3463 target = end;
3465 else
3466 end = target;
3469 data = insn_data + CODE_FOR_movstr;
3471 if (data->operand[0].mode != VOIDmode)
3472 end = gen_lowpart (data->operand[0].mode, end);
3474 insn = data->genfun (end, dest_mem, src_mem);
3476 gcc_assert (insn);
3478 emit_insn (insn);
3480 /* movstr is supposed to set end to the address of the NUL
3481 terminator. If the caller requested a mempcpy-like return value,
3482 adjust it. */
3483 if (endp == 1 && target != const0_rtx)
3485 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3486 emit_move_insn (target, force_operand (tem, NULL_RTX));
3489 return target;
3492 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3493 NULL_RTX if we failed the caller should emit a normal call, otherwise
3494 try to get the result in TARGET, if convenient (and in mode MODE if that's
3495 convenient). */
3497 static rtx
3498 expand_builtin_strcpy (tree exp, rtx target)
3500 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3502 tree dest = CALL_EXPR_ARG (exp, 0);
3503 tree src = CALL_EXPR_ARG (exp, 1);
3504 return expand_builtin_strcpy_args (dest, src, target);
3506 return NULL_RTX;
3509 /* Helper function to do the actual work for expand_builtin_strcpy. The
3510 arguments to the builtin_strcpy call DEST and SRC are broken out
3511 so that this can also be called without constructing an actual CALL_EXPR.
3512 The other arguments and return value are the same as for
3513 expand_builtin_strcpy. */
3515 static rtx
3516 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3518 return expand_movstr (dest, src, target, /*endp=*/0);
3521 /* Expand a call EXP to the stpcpy builtin.
3522 Return NULL_RTX if we failed the caller should emit a normal call,
3523 otherwise try to get the result in TARGET, if convenient (and in
3524 mode MODE if that's convenient). */
3526 static rtx
3527 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3529 tree dst, src;
3530 location_t loc = EXPR_LOCATION (exp);
3532 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3533 return NULL_RTX;
3535 dst = CALL_EXPR_ARG (exp, 0);
3536 src = CALL_EXPR_ARG (exp, 1);
3538 /* If return value is ignored, transform stpcpy into strcpy. */
3539 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3541 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3542 tree result = build_call_nofold (fn, 2, dst, src);
3543 return expand_expr (result, target, mode, EXPAND_NORMAL);
3545 else
3547 tree len, lenp1;
3548 rtx ret;
3550 /* Ensure we get an actual string whose length can be evaluated at
3551 compile-time, not an expression containing a string. This is
3552 because the latter will potentially produce pessimized code
3553 when used to produce the return value. */
3554 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3555 return expand_movstr (dst, src, target, /*endp=*/2);
3557 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3558 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3559 target, mode, /*endp=*/2);
3561 if (ret)
3562 return ret;
3564 if (TREE_CODE (len) == INTEGER_CST)
3566 rtx len_rtx = expand_normal (len);
3568 if (CONST_INT_P (len_rtx))
3570 ret = expand_builtin_strcpy_args (dst, src, target);
3572 if (ret)
3574 if (! target)
3576 if (mode != VOIDmode)
3577 target = gen_reg_rtx (mode);
3578 else
3579 target = gen_reg_rtx (GET_MODE (ret));
3581 if (GET_MODE (target) != GET_MODE (ret))
3582 ret = gen_lowpart (GET_MODE (target), ret);
3584 ret = plus_constant (ret, INTVAL (len_rtx));
3585 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3586 gcc_assert (ret);
3588 return target;
3593 return expand_movstr (dst, src, target, /*endp=*/2);
3597 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3598 bytes from constant string DATA + OFFSET and return it as target
3599 constant. */
3602 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3603 enum machine_mode mode)
3605 const char *str = (const char *) data;
3607 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3608 return const0_rtx;
3610 return c_readstr (str + offset, mode);
3613 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3614 NULL_RTX if we failed the caller should emit a normal call. */
3616 static rtx
3617 expand_builtin_strncpy (tree exp, rtx target)
3619 location_t loc = EXPR_LOCATION (exp);
3621 if (validate_arglist (exp,
3622 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3624 tree dest = CALL_EXPR_ARG (exp, 0);
3625 tree src = CALL_EXPR_ARG (exp, 1);
3626 tree len = CALL_EXPR_ARG (exp, 2);
3627 tree slen = c_strlen (src, 1);
3629 /* We must be passed a constant len and src parameter. */
3630 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3631 return NULL_RTX;
3633 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3635 /* We're required to pad with trailing zeros if the requested
3636 len is greater than strlen(s2)+1. In that case try to
3637 use store_by_pieces, if it fails, punt. */
3638 if (tree_int_cst_lt (slen, len))
3640 unsigned int dest_align
3641 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3642 const char *p = c_getstr (src);
3643 rtx dest_mem;
3645 if (!p || dest_align == 0 || !host_integerp (len, 1)
3646 || !can_store_by_pieces (tree_low_cst (len, 1),
3647 builtin_strncpy_read_str,
3648 CONST_CAST (char *, p),
3649 dest_align, false))
3650 return NULL_RTX;
3652 dest_mem = get_memory_rtx (dest, len);
3653 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3654 builtin_strncpy_read_str,
3655 CONST_CAST (char *, p), dest_align, false, 0);
3656 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3657 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3658 return dest_mem;
3661 return NULL_RTX;
3664 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3665 bytes from constant string DATA + OFFSET and return it as target
3666 constant. */
3669 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3670 enum machine_mode mode)
3672 const char *c = (const char *) data;
3673 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3675 memset (p, *c, GET_MODE_SIZE (mode));
3677 return c_readstr (p, mode);
3680 /* Callback routine for store_by_pieces. Return the RTL of a register
3681 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3682 char value given in the RTL register data. For example, if mode is
3683 4 bytes wide, return the RTL for 0x01010101*data. */
3685 static rtx
3686 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3687 enum machine_mode mode)
3689 rtx target, coeff;
3690 size_t size;
3691 char *p;
3693 size = GET_MODE_SIZE (mode);
3694 if (size == 1)
3695 return (rtx) data;
3697 p = XALLOCAVEC (char, size);
3698 memset (p, 1, size);
3699 coeff = c_readstr (p, mode);
3701 target = convert_to_mode (mode, (rtx) data, 1);
3702 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3703 return force_reg (mode, target);
3706 /* Expand expression EXP, which is a call to the memset builtin. Return
3707 NULL_RTX if we failed the caller should emit a normal call, otherwise
3708 try to get the result in TARGET, if convenient (and in mode MODE if that's
3709 convenient). */
3711 static rtx
3712 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3714 if (!validate_arglist (exp,
3715 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3716 return NULL_RTX;
3717 else
3719 tree dest = CALL_EXPR_ARG (exp, 0);
3720 tree val = CALL_EXPR_ARG (exp, 1);
3721 tree len = CALL_EXPR_ARG (exp, 2);
3722 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3726 /* Helper function to do the actual work for expand_builtin_memset. The
3727 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3728 so that this can also be called without constructing an actual CALL_EXPR.
3729 The other arguments and return value are the same as for
3730 expand_builtin_memset. */
3732 static rtx
3733 expand_builtin_memset_args (tree dest, tree val, tree len,
3734 rtx target, enum machine_mode mode, tree orig_exp)
3736 tree fndecl, fn;
3737 enum built_in_function fcode;
3738 char c;
3739 unsigned int dest_align;
3740 rtx dest_mem, dest_addr, len_rtx;
3741 HOST_WIDE_INT expected_size = -1;
3742 unsigned int expected_align = 0;
3744 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3746 /* If DEST is not a pointer type, don't do this operation in-line. */
3747 if (dest_align == 0)
3748 return NULL_RTX;
3750 if (currently_expanding_gimple_stmt)
3751 stringop_block_profile (currently_expanding_gimple_stmt,
3752 &expected_align, &expected_size);
3754 if (expected_align < dest_align)
3755 expected_align = dest_align;
3757 /* If the LEN parameter is zero, return DEST. */
3758 if (integer_zerop (len))
3760 /* Evaluate and ignore VAL in case it has side-effects. */
3761 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3762 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3765 /* Stabilize the arguments in case we fail. */
3766 dest = builtin_save_expr (dest);
3767 val = builtin_save_expr (val);
3768 len = builtin_save_expr (len);
3770 len_rtx = expand_normal (len);
3771 dest_mem = get_memory_rtx (dest, len);
3773 if (TREE_CODE (val) != INTEGER_CST)
3775 rtx val_rtx;
3777 val_rtx = expand_normal (val);
3778 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3779 val_rtx, 0);
3781 /* Assume that we can memset by pieces if we can store
3782 * the coefficients by pieces (in the required modes).
3783 * We can't pass builtin_memset_gen_str as that emits RTL. */
3784 c = 1;
3785 if (host_integerp (len, 1)
3786 && can_store_by_pieces (tree_low_cst (len, 1),
3787 builtin_memset_read_str, &c, dest_align,
3788 true))
3790 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3791 val_rtx);
3792 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3793 builtin_memset_gen_str, val_rtx, dest_align,
3794 true, 0);
3796 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3797 dest_align, expected_align,
3798 expected_size))
3799 goto do_libcall;
3801 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3802 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3803 return dest_mem;
3806 if (target_char_cast (val, &c))
3807 goto do_libcall;
3809 if (c)
3811 if (host_integerp (len, 1)
3812 && can_store_by_pieces (tree_low_cst (len, 1),
3813 builtin_memset_read_str, &c, dest_align,
3814 true))
3815 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3816 builtin_memset_read_str, &c, dest_align, true, 0);
3817 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3818 dest_align, expected_align,
3819 expected_size))
3820 goto do_libcall;
3822 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3823 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3824 return dest_mem;
3827 set_mem_align (dest_mem, dest_align);
3828 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3829 CALL_EXPR_TAILCALL (orig_exp)
3830 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3831 expected_align, expected_size);
3833 if (dest_addr == 0)
3835 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3836 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3839 return dest_addr;
3841 do_libcall:
3842 fndecl = get_callee_fndecl (orig_exp);
3843 fcode = DECL_FUNCTION_CODE (fndecl);
3844 if (fcode == BUILT_IN_MEMSET)
3845 fn = build_call_nofold (fndecl, 3, dest, val, len);
3846 else if (fcode == BUILT_IN_BZERO)
3847 fn = build_call_nofold (fndecl, 2, dest, len);
3848 else
3849 gcc_unreachable ();
3850 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3851 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3852 return expand_call (fn, target, target == const0_rtx);
3855 /* Expand expression EXP, which is a call to the bzero builtin. Return
3856 NULL_RTX if we failed the caller should emit a normal call. */
3858 static rtx
3859 expand_builtin_bzero (tree exp)
3861 tree dest, size;
3862 location_t loc = EXPR_LOCATION (exp);
3864 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3865 return NULL_RTX;
3867 dest = CALL_EXPR_ARG (exp, 0);
3868 size = CALL_EXPR_ARG (exp, 1);
3870 /* New argument list transforming bzero(ptr x, int y) to
3871 memset(ptr x, int 0, size_t y). This is done this way
3872 so that if it isn't expanded inline, we fallback to
3873 calling bzero instead of memset. */
3875 return expand_builtin_memset_args (dest, integer_zero_node,
3876 fold_convert_loc (loc, sizetype, size),
3877 const0_rtx, VOIDmode, exp);
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Pick the mode of the result operand from whichever block-compare
       insn pattern the target provides; prefer cmpmemsi.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.
       (Zero alignment means the argument is not really a pointer.)  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen_* routines may return NULL if the operands don't match the
       pattern's predicates; fall back to a libcall in that case.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.
	 (Zero alignment means the argument is not really a pointer.)  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* +1 below accounts for the terminating NUL so the compare
	     covers the full string including its terminator.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* +1 accounts for the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.
	 (Zero alignment means the argument is not really a pointer.)  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of words
   is controlled by the definition of CUMULATIVE_ARGS.  */

static rtx
expand_builtin_args_info (tree exp)
{
  /* View the target's CUMULATIVE_ARGS record as an array of ints.  */
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  int *word_ptr = (int *) &crtl->args.info;

  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
	error ("argument of %<__builtin_args_info%> must be constant");
      else
	{
	  HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

	  if (wordnum < 0 || wordnum >= nwords)
	    error ("argument of %<__builtin_args_info%> out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  /* On any error, fall back to a harmless constant zero.  */
  return const0_rtx;
}
4320 /* Expand a call to __builtin_next_arg. */
4322 static rtx
4323 expand_builtin_next_arg (void)
4325 /* Checking arguments is already done in fold_builtin_next_arg
4326 that must be called before this function. */
4327 return expand_binop (ptr_mode, add_optab,
4328 crtl->args.internal_arg_pointer,
4329 crtl->args.arg_offset_rtx,
4330 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* No lvalue needed and no side effects: the expression is
	     already safe to reuse as-is.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Evaluate the address once, then dereference it, so multiple
	 uses of the result evaluate VALIST only once.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
4380 /* The "standard" definition of va_list is void*. */
4382 tree
4383 std_build_builtin_va_list (void)
4385 return ptr_type_node;
4388 /* The "standard" abi va_list is va_list_type_node. */
4390 tree
4391 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4393 return va_list_type_node;
/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the type we were handed.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  /* Compare main variants so qualifiers don't defeat the match.  */
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  /* Not a va_list type at all.  */
  return NULL_TREE;
}
4431 /* The "standard" implementation of va_start: just assign `nextarg' to
4432 the variable. */
4434 void
4435 std_expand_builtin_va_start (tree valist, rtx nextarg)
4437 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4438 convert_move (va_r, nextarg, 0);
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  /* va_start needs the va_list plus the last named parameter.  */
  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; bail out
     quietly if it already reported a problem.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target override the standard expansion if it wants.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer, then
     dereferenced once more at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4571 /* Build an indirect-ref expression over the given TREE, which represents a
4572 piece of a va_arg() expansion. */
4573 tree
4574 build_va_arg_indirect_ref (tree addr)
4576 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4578 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4579 mf_mark (addr);
4581 return addr;
4584 /* Return a dummy expression of type TYPE in order to keep going after an
4585 error. */
4587 static tree
4588 dummy_object (tree type)
4590 tree t = build_int_cst (build_pointer_type (type), 0);
4591 return build1 (INDIRECT_REF, type, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      /* Emit the "should pass promoted type" hint only once per run.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4687 /* Expand EXP, a call to __builtin_va_end. */
4689 static rtx
4690 expand_builtin_va_end (tree exp)
4692 tree valist = CALL_EXPR_ARG (exp, 0);
4694 /* Evaluate for side effects, if needed. I hate macros that don't
4695 do that. */
4696 if (TREE_SIDE_EFFECTS (valist))
4697 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4699 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination must be an lvalue; source need not be.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole record as a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy into a register if needed so
	 the value survives to the point of use.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4801 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4802 we failed and the caller should emit a normal call, otherwise try to get
4803 the result in TARGET, if convenient. */
4805 static rtx
4806 expand_builtin_alloca (tree exp, rtx target)
4808 rtx op0;
4809 rtx result;
4811 /* Emit normal call if marked not-inlineable. */
4812 if (CALL_CANNOT_INLINE_P (exp))
4813 return NULL_RTX;
4815 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4816 return NULL_RTX;
4818 /* Compute the argument. */
4819 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4821 /* Allocate the desired space. */
4822 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4823 result = convert_memory_address (ptr_mode, result);
4825 return result;
4828 /* Expand a call to a bswap builtin with argument ARG0. MODE
4829 is the mode to expand with. */
4831 static rtx
4832 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4834 enum machine_mode mode;
4835 tree arg;
4836 rtx op0;
4838 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4839 return NULL_RTX;
4841 arg = CALL_EXPR_ARG (exp, 0);
4842 mode = TYPE_MODE (TREE_TYPE (arg));
4843 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4845 target = expand_unop (mode, bswap_optab, op0, target, 1);
4847 gcc_assert (target);
4849 return convert_to_mode (mode, target, 0);
4852 /* Expand a call to a unary builtin in EXP.
4853 Return NULL_RTX if a normal call should be emitted rather than expanding the
4854 function in-line. If convenient, the result should be placed in TARGET.
4855 SUBTARGET may be used as the target for computing one of EXP's operands. */
4857 static rtx
4858 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4859 rtx subtarget, optab op_optab)
4861 rtx op0;
4863 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4864 return NULL_RTX;
4866 /* Compute the argument. */
4867 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4868 VOIDmode, EXPAND_NORMAL);
4869 /* Compute op, into TARGET if possible.
4870 Set TARGET to wherever the result comes back. */
4871 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4872 op_optab, op0, target, 1);
4873 gcc_assert (target);
4875 return convert_to_mode (target_mode, target, 0);
4878 /* Expand a call to __builtin_expect. We just return our argument
4879 as the builtin_expect semantic should've been already executed by
4880 tree branch prediction pass. */
4882 static rtx
4883 expand_builtin_expect (tree exp, rtx target)
4885 tree arg;
4887 if (call_expr_nargs (exp) < 2)
4888 return const0_rtx;
4889 arg = CALL_EXPR_ARG (exp, 0);
4891 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4892 /* When guessing was done, the hints should be already stripped away. */
4893 gcc_assert (!flag_guess_branch_prob
4894 || optimize == 0 || errorcount || sorrycount);
4895 return target;
/* Emit code for __builtin_trap: use the target's trap insn if it has
   one, otherwise call abort through its libfunc.  Control never
   continues past the trap, so emit a barrier.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4921 /* Expand EXP, a call to fabs, fabsf or fabsl.
4922 Return NULL_RTX if a normal call should be emitted rather than expanding
4923 the function inline. If convenient, the result should be placed
4924 in TARGET. SUBTARGET may be used as the target for computing
4925 the operand. */
4927 static rtx
4928 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4930 enum machine_mode mode;
4931 tree arg;
4932 rtx op0;
4934 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4935 return NULL_RTX;
4937 arg = CALL_EXPR_ARG (exp, 0);
4938 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4939 mode = TYPE_MODE (TREE_TYPE (arg));
4940 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4941 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* First argument: the value whose magnitude is used.  */
  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  /* Second argument: the value whose sign is copied.  */
  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
4967 /* Create a new constant string literal and return a char* pointer to it.
4968 The STRING_CST value is the LEN characters at STR. */
4969 tree
4970 build_string_literal (int len, const char *str)
4972 tree t, elem, index, type;
4974 t = build_string (len, str);
4975 elem = build_type_variant (char_type_node, 1, 0);
4976 index = build_index_type (size_int (len - 1));
4977 type = build_array_type (elem, index);
4978 TREE_TYPE (t) = type;
4979 TREE_CONSTANT (t) = 1;
4980 TREE_READONLY (t) = 1;
4981 TREE_STATIC (t) = 1;
4983 type = build_pointer_type (elem);
4984 t = build1 (ADDR_EXPR, type,
4985 build4 (ARRAY_REF, elem,
4986 t, integer_zero_node, NULL_TREE, NULL_TREE));
4987 return t;
/* Expand a call to either the entry or exit function profiler.
   EXITP selects the exit-profiler libcall.  The libcall receives the
   address of the current function and the current return address.  */

static rtx
expand_builtin_profile_func (bool exitp)
{
  rtx this_rtx, which;

  /* DECL_RTL of a function is a MEM around its symbol; strip the MEM
     to get the bare function address.  */
  this_rtx = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (this_rtx));
  this_rtx = XEXP (this_rtx, 0);

  if (exitp)
    which = profile_function_exit_libfunc;
  else
    which = profile_function_entry_libfunc;

  /* NOTE(review): count 0 == this frame's own return address — confirm
     against expand_builtin_return_addr's contract.  */
  emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
		     expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						 0),
		     Pmode);

  return const0_rtx;
}
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when the
   default libcall expansion should happen, const0_rtx otherwise.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Coerce each address into a form the insn's operand predicates
	 accept, copying to a register if necessary.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary:
     (tramp + align-1) & -align, emitted as RTL.  */
  temp = gen_reg_rtx (Pmode);
  addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
  mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
/* Expand a call to __builtin_init_trampoline: initialize the trampoline
   storage at argument 0 to call the nested function (argument 1) with
   static chain (argument 2).  Returns const0_rtx, or NULL_RTX if the
   argument list is malformed and a normal call should be emitted.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding changed the address, rebuild the MEM with the now-known
     alignment and size of the trampoline block.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5140 static rtx
5141 expand_builtin_adjust_trampoline (tree exp)
5143 rtx tramp;
5145 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5146 return NULL_RTX;
5148 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5149 tramp = round_trampoline_addr (tramp);
5150 if (targetm.calls.trampoline_adjust_address)
5151 tramp = targetm.calls.trampoline_adjust_address (tramp);
5153 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));	/* mode of the FP argument */
  rmode = TYPE_MODE (TREE_TYPE (exp));	/* mode of the integer result */
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as the integer mode of the
	 same size, if the target has one.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: operate only on the word that actually holds
	 the sign bit, and rebase BITPOS into that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word mask constant 1 << bitpos.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the FUNCTION_DECL of the builtin
   actually being called.  IGNORE is nonzero if the value is to be
   ignored.  When arc profiling is active, the call is redirected to
   the matching __gcov_* wrapper so libgcov can dump/keep counters
   sane across fork/exec.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Declare the wrapper as an external, public function with the same
     type as the builtin being replaced.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  /* Rebuild the CALL_EXPR with the wrapper as callee and expand it.  */
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  The returned MEM is volatile and uses
   the special memory-barrier alias set.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed semantics in GCC 4.4; when enabled, warn
     once per form (fetch-and-nand vs. nand-and-fetch).  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* One-shot flags so each warning is emitted at most once per
	 compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (old_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  old_val = convert_modes (mode, old_mode, old_val, 1);

  new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* Likewise for the replacement value (argument 2).  */
  old_mode = GET_MODE (new_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
  new_val = convert_modes (mode, old_mode, new_val, 1);

  if (is_bool)
    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
  else
    return expand_val_compare_and_swap (mem, old_val, new_val, target);
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
				  rtx target)
{
  rtx val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  return expand_sync_lock_test_and_set (mem, val, target);
}
/* Expand the __sync_synchronize intrinsic.  Preference order: the
   target's memory_barrier insn, then a synchronize libcall, then a
   volatile asm with a "memory" clobber as a compiler-level barrier.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = sync_lock_release[mode];
  if (icode != CODE_FOR_nothing)
    {
      if (!insn_data[icode].operand[1].predicate (val, mode))
	val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
5589 /* Expand an expression EXP that calls a built-in function,
5590 with result going to TARGET if that's convenient
5591 (and in mode MODE if that's convenient).
5592 SUBTARGET may be used as the target for computing one of EXP's operands.
5593 IGNORE is nonzero if the value is to be ignored. */
5596 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5597 int ignore)
5599 tree fndecl = get_callee_fndecl (exp);
5600 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5601 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5603 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5604 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5606 /* When not optimizing, generate calls to library functions for a certain
5607 set of builtins. */
5608 if (!optimize
5609 && !called_as_built_in (fndecl)
5610 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5611 && fcode != BUILT_IN_ALLOCA
5612 && fcode != BUILT_IN_FREE)
5613 return expand_call (exp, target, ignore);
5615 /* The built-in function expanders test for target == const0_rtx
5616 to determine whether the function's result will be ignored. */
5617 if (ignore)
5618 target = const0_rtx;
5620 /* If the result of a pure or const built-in function is ignored, and
5621 none of its arguments are volatile, we can avoid expanding the
5622 built-in call and just evaluate the arguments for side-effects. */
5623 if (target == const0_rtx
5624 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5626 bool volatilep = false;
5627 tree arg;
5628 call_expr_arg_iterator iter;
5630 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5631 if (TREE_THIS_VOLATILE (arg))
5633 volatilep = true;
5634 break;
5637 if (! volatilep)
5639 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5640 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5641 return const0_rtx;
5645 switch (fcode)
5647 CASE_FLT_FN (BUILT_IN_FABS):
5648 target = expand_builtin_fabs (exp, target, subtarget);
5649 if (target)
5650 return target;
5651 break;
5653 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5654 target = expand_builtin_copysign (exp, target, subtarget);
5655 if (target)
5656 return target;
5657 break;
5659 /* Just do a normal library call if we were unable to fold
5660 the values. */
5661 CASE_FLT_FN (BUILT_IN_CABS):
5662 break;
5664 CASE_FLT_FN (BUILT_IN_EXP):
5665 CASE_FLT_FN (BUILT_IN_EXP10):
5666 CASE_FLT_FN (BUILT_IN_POW10):
5667 CASE_FLT_FN (BUILT_IN_EXP2):
5668 CASE_FLT_FN (BUILT_IN_EXPM1):
5669 CASE_FLT_FN (BUILT_IN_LOGB):
5670 CASE_FLT_FN (BUILT_IN_LOG):
5671 CASE_FLT_FN (BUILT_IN_LOG10):
5672 CASE_FLT_FN (BUILT_IN_LOG2):
5673 CASE_FLT_FN (BUILT_IN_LOG1P):
5674 CASE_FLT_FN (BUILT_IN_TAN):
5675 CASE_FLT_FN (BUILT_IN_ASIN):
5676 CASE_FLT_FN (BUILT_IN_ACOS):
5677 CASE_FLT_FN (BUILT_IN_ATAN):
5678 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5679 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5680 because of possible accuracy problems. */
5681 if (! flag_unsafe_math_optimizations)
5682 break;
5683 CASE_FLT_FN (BUILT_IN_SQRT):
5684 CASE_FLT_FN (BUILT_IN_FLOOR):
5685 CASE_FLT_FN (BUILT_IN_CEIL):
5686 CASE_FLT_FN (BUILT_IN_TRUNC):
5687 CASE_FLT_FN (BUILT_IN_ROUND):
5688 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5689 CASE_FLT_FN (BUILT_IN_RINT):
5690 target = expand_builtin_mathfn (exp, target, subtarget);
5691 if (target)
5692 return target;
5693 break;
5695 CASE_FLT_FN (BUILT_IN_ILOGB):
5696 if (! flag_unsafe_math_optimizations)
5697 break;
5698 CASE_FLT_FN (BUILT_IN_ISINF):
5699 CASE_FLT_FN (BUILT_IN_FINITE):
5700 case BUILT_IN_ISFINITE:
5701 case BUILT_IN_ISNORMAL:
5702 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5703 if (target)
5704 return target;
5705 break;
5707 CASE_FLT_FN (BUILT_IN_LCEIL):
5708 CASE_FLT_FN (BUILT_IN_LLCEIL):
5709 CASE_FLT_FN (BUILT_IN_LFLOOR):
5710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5711 target = expand_builtin_int_roundingfn (exp, target);
5712 if (target)
5713 return target;
5714 break;
5716 CASE_FLT_FN (BUILT_IN_LRINT):
5717 CASE_FLT_FN (BUILT_IN_LLRINT):
5718 CASE_FLT_FN (BUILT_IN_LROUND):
5719 CASE_FLT_FN (BUILT_IN_LLROUND):
5720 target = expand_builtin_int_roundingfn_2 (exp, target);
5721 if (target)
5722 return target;
5723 break;
5725 CASE_FLT_FN (BUILT_IN_POW):
5726 target = expand_builtin_pow (exp, target, subtarget);
5727 if (target)
5728 return target;
5729 break;
5731 CASE_FLT_FN (BUILT_IN_POWI):
5732 target = expand_builtin_powi (exp, target, subtarget);
5733 if (target)
5734 return target;
5735 break;
5737 CASE_FLT_FN (BUILT_IN_ATAN2):
5738 CASE_FLT_FN (BUILT_IN_LDEXP):
5739 CASE_FLT_FN (BUILT_IN_SCALB):
5740 CASE_FLT_FN (BUILT_IN_SCALBN):
5741 CASE_FLT_FN (BUILT_IN_SCALBLN):
5742 if (! flag_unsafe_math_optimizations)
5743 break;
5745 CASE_FLT_FN (BUILT_IN_FMOD):
5746 CASE_FLT_FN (BUILT_IN_REMAINDER):
5747 CASE_FLT_FN (BUILT_IN_DREM):
5748 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5749 if (target)
5750 return target;
5751 break;
5753 CASE_FLT_FN (BUILT_IN_CEXPI):
5754 target = expand_builtin_cexpi (exp, target, subtarget);
5755 gcc_assert (target);
5756 return target;
5758 CASE_FLT_FN (BUILT_IN_SIN):
5759 CASE_FLT_FN (BUILT_IN_COS):
5760 if (! flag_unsafe_math_optimizations)
5761 break;
5762 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5763 if (target)
5764 return target;
5765 break;
5767 CASE_FLT_FN (BUILT_IN_SINCOS):
5768 if (! flag_unsafe_math_optimizations)
5769 break;
5770 target = expand_builtin_sincos (exp);
5771 if (target)
5772 return target;
5773 break;
5775 case BUILT_IN_APPLY_ARGS:
5776 return expand_builtin_apply_args ();
5778 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5779 FUNCTION with a copy of the parameters described by
5780 ARGUMENTS, and ARGSIZE. It returns a block of memory
5781 allocated on the stack into which is stored all the registers
5782 that might possibly be used for returning the result of a
5783 function. ARGUMENTS is the value returned by
5784 __builtin_apply_args. ARGSIZE is the number of bytes of
5785 arguments that must be copied. ??? How should this value be
5786 computed? We'll also need a safe worst case value for varargs
5787 functions. */
5788 case BUILT_IN_APPLY:
5789 if (!validate_arglist (exp, POINTER_TYPE,
5790 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5791 && !validate_arglist (exp, REFERENCE_TYPE,
5792 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5793 return const0_rtx;
5794 else
5796 rtx ops[3];
5798 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5799 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5800 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5802 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5805 /* __builtin_return (RESULT) causes the function to return the
5806 value described by RESULT. RESULT is address of the block of
5807 memory returned by __builtin_apply. */
5808 case BUILT_IN_RETURN:
5809 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5810 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5811 return const0_rtx;
5813 case BUILT_IN_SAVEREGS:
5814 return expand_builtin_saveregs ();
5816 case BUILT_IN_ARGS_INFO:
5817 return expand_builtin_args_info (exp);
5819 case BUILT_IN_VA_ARG_PACK:
5820 /* All valid uses of __builtin_va_arg_pack () are removed during
5821 inlining. */
5822 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5823 return const0_rtx;
5825 case BUILT_IN_VA_ARG_PACK_LEN:
5826 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5827 inlining. */
5828 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5829 return const0_rtx;
5831 /* Return the address of the first anonymous stack arg. */
5832 case BUILT_IN_NEXT_ARG:
5833 if (fold_builtin_next_arg (exp, false))
5834 return const0_rtx;
5835 return expand_builtin_next_arg ();
5837 case BUILT_IN_CLEAR_CACHE:
5838 target = expand_builtin___clear_cache (exp);
5839 if (target)
5840 return target;
5841 break;
5843 case BUILT_IN_CLASSIFY_TYPE:
5844 return expand_builtin_classify_type (exp);
5846 case BUILT_IN_CONSTANT_P:
5847 return const0_rtx;
5849 case BUILT_IN_FRAME_ADDRESS:
5850 case BUILT_IN_RETURN_ADDRESS:
5851 return expand_builtin_frame_address (fndecl, exp);
5853 /* Returns the address of the area where the structure is returned.
5854 0 otherwise. */
5855 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5856 if (call_expr_nargs (exp) != 0
5857 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5858 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5859 return const0_rtx;
5860 else
5861 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5863 case BUILT_IN_ALLOCA:
5864 target = expand_builtin_alloca (exp, target);
5865 if (target)
5866 return target;
5867 break;
5869 case BUILT_IN_STACK_SAVE:
5870 return expand_stack_save ();
5872 case BUILT_IN_STACK_RESTORE:
5873 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5874 return const0_rtx;
5876 case BUILT_IN_BSWAP32:
5877 case BUILT_IN_BSWAP64:
5878 target = expand_builtin_bswap (exp, target, subtarget);
5880 if (target)
5881 return target;
5882 break;
5884 CASE_INT_FN (BUILT_IN_FFS):
5885 case BUILT_IN_FFSIMAX:
5886 target = expand_builtin_unop (target_mode, exp, target,
5887 subtarget, ffs_optab);
5888 if (target)
5889 return target;
5890 break;
5892 CASE_INT_FN (BUILT_IN_CLZ):
5893 case BUILT_IN_CLZIMAX:
5894 target = expand_builtin_unop (target_mode, exp, target,
5895 subtarget, clz_optab);
5896 if (target)
5897 return target;
5898 break;
5900 CASE_INT_FN (BUILT_IN_CTZ):
5901 case BUILT_IN_CTZIMAX:
5902 target = expand_builtin_unop (target_mode, exp, target,
5903 subtarget, ctz_optab);
5904 if (target)
5905 return target;
5906 break;
5908 CASE_INT_FN (BUILT_IN_POPCOUNT):
5909 case BUILT_IN_POPCOUNTIMAX:
5910 target = expand_builtin_unop (target_mode, exp, target,
5911 subtarget, popcount_optab);
5912 if (target)
5913 return target;
5914 break;
5916 CASE_INT_FN (BUILT_IN_PARITY):
5917 case BUILT_IN_PARITYIMAX:
5918 target = expand_builtin_unop (target_mode, exp, target,
5919 subtarget, parity_optab);
5920 if (target)
5921 return target;
5922 break;
5924 case BUILT_IN_STRLEN:
5925 target = expand_builtin_strlen (exp, target, target_mode);
5926 if (target)
5927 return target;
5928 break;
5930 case BUILT_IN_STRCPY:
5931 target = expand_builtin_strcpy (exp, target);
5932 if (target)
5933 return target;
5934 break;
5936 case BUILT_IN_STRNCPY:
5937 target = expand_builtin_strncpy (exp, target);
5938 if (target)
5939 return target;
5940 break;
5942 case BUILT_IN_STPCPY:
5943 target = expand_builtin_stpcpy (exp, target, mode);
5944 if (target)
5945 return target;
5946 break;
5948 case BUILT_IN_MEMCPY:
5949 target = expand_builtin_memcpy (exp, target);
5950 if (target)
5951 return target;
5952 break;
5954 case BUILT_IN_MEMPCPY:
5955 target = expand_builtin_mempcpy (exp, target, mode);
5956 if (target)
5957 return target;
5958 break;
5960 case BUILT_IN_MEMSET:
5961 target = expand_builtin_memset (exp, target, mode);
5962 if (target)
5963 return target;
5964 break;
5966 case BUILT_IN_BZERO:
5967 target = expand_builtin_bzero (exp);
5968 if (target)
5969 return target;
5970 break;
5972 case BUILT_IN_STRCMP:
5973 target = expand_builtin_strcmp (exp, target);
5974 if (target)
5975 return target;
5976 break;
5978 case BUILT_IN_STRNCMP:
5979 target = expand_builtin_strncmp (exp, target, mode);
5980 if (target)
5981 return target;
5982 break;
5984 case BUILT_IN_BCMP:
5985 case BUILT_IN_MEMCMP:
5986 target = expand_builtin_memcmp (exp, target, mode);
5987 if (target)
5988 return target;
5989 break;
5991 case BUILT_IN_SETJMP:
5992 /* This should have been lowered to the builtins below. */
5993 gcc_unreachable ();
5995 case BUILT_IN_SETJMP_SETUP:
5996 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5997 and the receiver label. */
5998 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6000 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6001 VOIDmode, EXPAND_NORMAL);
6002 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6003 rtx label_r = label_rtx (label);
6005 /* This is copied from the handling of non-local gotos. */
6006 expand_builtin_setjmp_setup (buf_addr, label_r);
6007 nonlocal_goto_handler_labels
6008 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6009 nonlocal_goto_handler_labels);
6010 /* ??? Do not let expand_label treat us as such since we would
6011 not want to be both on the list of non-local labels and on
6012 the list of forced labels. */
6013 FORCED_LABEL (label) = 0;
6014 return const0_rtx;
6016 break;
6018 case BUILT_IN_SETJMP_DISPATCHER:
6019 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6020 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6022 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6023 rtx label_r = label_rtx (label);
6025 /* Remove the dispatcher label from the list of non-local labels
6026 since the receiver labels have been added to it above. */
6027 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6028 return const0_rtx;
6030 break;
6032 case BUILT_IN_SETJMP_RECEIVER:
6033 /* __builtin_setjmp_receiver is passed the receiver label. */
6034 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6036 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6037 rtx label_r = label_rtx (label);
6039 expand_builtin_setjmp_receiver (label_r);
6040 return const0_rtx;
6042 break;
6044 /* __builtin_longjmp is passed a pointer to an array of five words.
6045 It's similar to the C library longjmp function but works with
6046 __builtin_setjmp above. */
6047 case BUILT_IN_LONGJMP:
6048 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6050 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6051 VOIDmode, EXPAND_NORMAL);
6052 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6054 if (value != const1_rtx)
6056 error ("%<__builtin_longjmp%> second argument must be 1");
6057 return const0_rtx;
6060 expand_builtin_longjmp (buf_addr, value);
6061 return const0_rtx;
6063 break;
6065 case BUILT_IN_NONLOCAL_GOTO:
6066 target = expand_builtin_nonlocal_goto (exp);
6067 if (target)
6068 return target;
6069 break;
6071 /* This updates the setjmp buffer that is its argument with the value
6072 of the current stack pointer. */
6073 case BUILT_IN_UPDATE_SETJMP_BUF:
6074 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6076 rtx buf_addr
6077 = expand_normal (CALL_EXPR_ARG (exp, 0));
6079 expand_builtin_update_setjmp_buf (buf_addr);
6080 return const0_rtx;
6082 break;
6084 case BUILT_IN_TRAP:
6085 expand_builtin_trap ();
6086 return const0_rtx;
6088 case BUILT_IN_UNREACHABLE:
6089 expand_builtin_unreachable ();
6090 return const0_rtx;
6092 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6093 case BUILT_IN_SIGNBITD32:
6094 case BUILT_IN_SIGNBITD64:
6095 case BUILT_IN_SIGNBITD128:
6096 target = expand_builtin_signbit (exp, target);
6097 if (target)
6098 return target;
6099 break;
6101 /* Various hooks for the DWARF 2 __throw routine. */
6102 case BUILT_IN_UNWIND_INIT:
6103 expand_builtin_unwind_init ();
6104 return const0_rtx;
6105 case BUILT_IN_DWARF_CFA:
6106 return virtual_cfa_rtx;
6107 #ifdef DWARF2_UNWIND_INFO
6108 case BUILT_IN_DWARF_SP_COLUMN:
6109 return expand_builtin_dwarf_sp_column ();
6110 case BUILT_IN_INIT_DWARF_REG_SIZES:
6111 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6112 return const0_rtx;
6113 #endif
6114 case BUILT_IN_FROB_RETURN_ADDR:
6115 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6116 case BUILT_IN_EXTRACT_RETURN_ADDR:
6117 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6118 case BUILT_IN_EH_RETURN:
6119 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6120 CALL_EXPR_ARG (exp, 1));
6121 return const0_rtx;
6122 #ifdef EH_RETURN_DATA_REGNO
6123 case BUILT_IN_EH_RETURN_DATA_REGNO:
6124 return expand_builtin_eh_return_data_regno (exp);
6125 #endif
6126 case BUILT_IN_EXTEND_POINTER:
6127 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6128 case BUILT_IN_EH_POINTER:
6129 return expand_builtin_eh_pointer (exp);
6130 case BUILT_IN_EH_FILTER:
6131 return expand_builtin_eh_filter (exp);
6132 case BUILT_IN_EH_COPY_VALUES:
6133 return expand_builtin_eh_copy_values (exp);
6135 case BUILT_IN_VA_START:
6136 return expand_builtin_va_start (exp);
6137 case BUILT_IN_VA_END:
6138 return expand_builtin_va_end (exp);
6139 case BUILT_IN_VA_COPY:
6140 return expand_builtin_va_copy (exp);
6141 case BUILT_IN_EXPECT:
6142 return expand_builtin_expect (exp, target);
6143 case BUILT_IN_PREFETCH:
6144 expand_builtin_prefetch (exp);
6145 return const0_rtx;
6147 case BUILT_IN_PROFILE_FUNC_ENTER:
6148 return expand_builtin_profile_func (false);
6149 case BUILT_IN_PROFILE_FUNC_EXIT:
6150 return expand_builtin_profile_func (true);
6152 case BUILT_IN_INIT_TRAMPOLINE:
6153 return expand_builtin_init_trampoline (exp);
6154 case BUILT_IN_ADJUST_TRAMPOLINE:
6155 return expand_builtin_adjust_trampoline (exp);
6157 case BUILT_IN_FORK:
6158 case BUILT_IN_EXECL:
6159 case BUILT_IN_EXECV:
6160 case BUILT_IN_EXECLP:
6161 case BUILT_IN_EXECLE:
6162 case BUILT_IN_EXECVP:
6163 case BUILT_IN_EXECVE:
6164 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6165 if (target)
6166 return target;
6167 break;
6169 case BUILT_IN_FETCH_AND_ADD_1:
6170 case BUILT_IN_FETCH_AND_ADD_2:
6171 case BUILT_IN_FETCH_AND_ADD_4:
6172 case BUILT_IN_FETCH_AND_ADD_8:
6173 case BUILT_IN_FETCH_AND_ADD_16:
6174 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6175 target = expand_builtin_sync_operation (mode, exp, PLUS,
6176 false, target, ignore);
6177 if (target)
6178 return target;
6179 break;
6181 case BUILT_IN_FETCH_AND_SUB_1:
6182 case BUILT_IN_FETCH_AND_SUB_2:
6183 case BUILT_IN_FETCH_AND_SUB_4:
6184 case BUILT_IN_FETCH_AND_SUB_8:
6185 case BUILT_IN_FETCH_AND_SUB_16:
6186 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6187 target = expand_builtin_sync_operation (mode, exp, MINUS,
6188 false, target, ignore);
6189 if (target)
6190 return target;
6191 break;
6193 case BUILT_IN_FETCH_AND_OR_1:
6194 case BUILT_IN_FETCH_AND_OR_2:
6195 case BUILT_IN_FETCH_AND_OR_4:
6196 case BUILT_IN_FETCH_AND_OR_8:
6197 case BUILT_IN_FETCH_AND_OR_16:
6198 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6199 target = expand_builtin_sync_operation (mode, exp, IOR,
6200 false, target, ignore);
6201 if (target)
6202 return target;
6203 break;
6205 case BUILT_IN_FETCH_AND_AND_1:
6206 case BUILT_IN_FETCH_AND_AND_2:
6207 case BUILT_IN_FETCH_AND_AND_4:
6208 case BUILT_IN_FETCH_AND_AND_8:
6209 case BUILT_IN_FETCH_AND_AND_16:
6210 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6211 target = expand_builtin_sync_operation (mode, exp, AND,
6212 false, target, ignore);
6213 if (target)
6214 return target;
6215 break;
6217 case BUILT_IN_FETCH_AND_XOR_1:
6218 case BUILT_IN_FETCH_AND_XOR_2:
6219 case BUILT_IN_FETCH_AND_XOR_4:
6220 case BUILT_IN_FETCH_AND_XOR_8:
6221 case BUILT_IN_FETCH_AND_XOR_16:
6222 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6223 target = expand_builtin_sync_operation (mode, exp, XOR,
6224 false, target, ignore);
6225 if (target)
6226 return target;
6227 break;
6229 case BUILT_IN_FETCH_AND_NAND_1:
6230 case BUILT_IN_FETCH_AND_NAND_2:
6231 case BUILT_IN_FETCH_AND_NAND_4:
6232 case BUILT_IN_FETCH_AND_NAND_8:
6233 case BUILT_IN_FETCH_AND_NAND_16:
6234 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6235 target = expand_builtin_sync_operation (mode, exp, NOT,
6236 false, target, ignore);
6237 if (target)
6238 return target;
6239 break;
6241 case BUILT_IN_ADD_AND_FETCH_1:
6242 case BUILT_IN_ADD_AND_FETCH_2:
6243 case BUILT_IN_ADD_AND_FETCH_4:
6244 case BUILT_IN_ADD_AND_FETCH_8:
6245 case BUILT_IN_ADD_AND_FETCH_16:
6246 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6247 target = expand_builtin_sync_operation (mode, exp, PLUS,
6248 true, target, ignore);
6249 if (target)
6250 return target;
6251 break;
6253 case BUILT_IN_SUB_AND_FETCH_1:
6254 case BUILT_IN_SUB_AND_FETCH_2:
6255 case BUILT_IN_SUB_AND_FETCH_4:
6256 case BUILT_IN_SUB_AND_FETCH_8:
6257 case BUILT_IN_SUB_AND_FETCH_16:
6258 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6259 target = expand_builtin_sync_operation (mode, exp, MINUS,
6260 true, target, ignore);
6261 if (target)
6262 return target;
6263 break;
6265 case BUILT_IN_OR_AND_FETCH_1:
6266 case BUILT_IN_OR_AND_FETCH_2:
6267 case BUILT_IN_OR_AND_FETCH_4:
6268 case BUILT_IN_OR_AND_FETCH_8:
6269 case BUILT_IN_OR_AND_FETCH_16:
6270 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6271 target = expand_builtin_sync_operation (mode, exp, IOR,
6272 true, target, ignore);
6273 if (target)
6274 return target;
6275 break;
6277 case BUILT_IN_AND_AND_FETCH_1:
6278 case BUILT_IN_AND_AND_FETCH_2:
6279 case BUILT_IN_AND_AND_FETCH_4:
6280 case BUILT_IN_AND_AND_FETCH_8:
6281 case BUILT_IN_AND_AND_FETCH_16:
6282 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6283 target = expand_builtin_sync_operation (mode, exp, AND,
6284 true, target, ignore);
6285 if (target)
6286 return target;
6287 break;
6289 case BUILT_IN_XOR_AND_FETCH_1:
6290 case BUILT_IN_XOR_AND_FETCH_2:
6291 case BUILT_IN_XOR_AND_FETCH_4:
6292 case BUILT_IN_XOR_AND_FETCH_8:
6293 case BUILT_IN_XOR_AND_FETCH_16:
6294 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6295 target = expand_builtin_sync_operation (mode, exp, XOR,
6296 true, target, ignore);
6297 if (target)
6298 return target;
6299 break;
6301 case BUILT_IN_NAND_AND_FETCH_1:
6302 case BUILT_IN_NAND_AND_FETCH_2:
6303 case BUILT_IN_NAND_AND_FETCH_4:
6304 case BUILT_IN_NAND_AND_FETCH_8:
6305 case BUILT_IN_NAND_AND_FETCH_16:
6306 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6307 target = expand_builtin_sync_operation (mode, exp, NOT,
6308 true, target, ignore);
6309 if (target)
6310 return target;
6311 break;
6313 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6314 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6315 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6316 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6317 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6318 if (mode == VOIDmode)
6319 mode = TYPE_MODE (boolean_type_node);
6320 if (!target || !register_operand (target, mode))
6321 target = gen_reg_rtx (mode);
6323 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6324 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6325 if (target)
6326 return target;
6327 break;
6329 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6330 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6331 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6332 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6333 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6334 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6335 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6336 if (target)
6337 return target;
6338 break;
6340 case BUILT_IN_LOCK_TEST_AND_SET_1:
6341 case BUILT_IN_LOCK_TEST_AND_SET_2:
6342 case BUILT_IN_LOCK_TEST_AND_SET_4:
6343 case BUILT_IN_LOCK_TEST_AND_SET_8:
6344 case BUILT_IN_LOCK_TEST_AND_SET_16:
6345 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6346 target = expand_builtin_lock_test_and_set (mode, exp, target);
6347 if (target)
6348 return target;
6349 break;
6351 case BUILT_IN_LOCK_RELEASE_1:
6352 case BUILT_IN_LOCK_RELEASE_2:
6353 case BUILT_IN_LOCK_RELEASE_4:
6354 case BUILT_IN_LOCK_RELEASE_8:
6355 case BUILT_IN_LOCK_RELEASE_16:
6356 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6357 expand_builtin_lock_release (mode, exp);
6358 return const0_rtx;
6360 case BUILT_IN_SYNCHRONIZE:
6361 expand_builtin_synchronize ();
6362 return const0_rtx;
6364 case BUILT_IN_OBJECT_SIZE:
6365 return expand_builtin_object_size (exp);
6367 case BUILT_IN_MEMCPY_CHK:
6368 case BUILT_IN_MEMPCPY_CHK:
6369 case BUILT_IN_MEMMOVE_CHK:
6370 case BUILT_IN_MEMSET_CHK:
6371 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6372 if (target)
6373 return target;
6374 break;
6376 case BUILT_IN_STRCPY_CHK:
6377 case BUILT_IN_STPCPY_CHK:
6378 case BUILT_IN_STRNCPY_CHK:
6379 case BUILT_IN_STRCAT_CHK:
6380 case BUILT_IN_STRNCAT_CHK:
6381 case BUILT_IN_SNPRINTF_CHK:
6382 case BUILT_IN_VSNPRINTF_CHK:
6383 maybe_emit_chk_warning (exp, fcode);
6384 break;
6386 case BUILT_IN_SPRINTF_CHK:
6387 case BUILT_IN_VSPRINTF_CHK:
6388 maybe_emit_sprintf_chk_warning (exp, fcode);
6389 break;
6391 case BUILT_IN_FREE:
6392 maybe_emit_free_warning (exp);
6393 break;
6395 default: /* just do library call, if unknown builtin */
6396 break;
6399 /* The switch statement above can drop through to cause the function
6400 to be called normally. */
6401 return expand_call (exp, target, ignore);
6404 /* Determine whether a tree node represents a call to a built-in
6405 function. If the tree T is a call to a built-in function with
6406 the right number of arguments of the appropriate types, return
6407 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6408 Otherwise the return value is END_BUILTINS. */
6410 enum built_in_function
6411 builtin_mathfn_code (const_tree t)
6413 const_tree fndecl, arg, parmlist;
6414 const_tree argtype, parmtype;
6415 const_call_expr_arg_iterator iter;
6417 if (TREE_CODE (t) != CALL_EXPR
6418 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6419 return END_BUILTINS;
6421 fndecl = get_callee_fndecl (t);
6422 if (fndecl == NULL_TREE
6423 || TREE_CODE (fndecl) != FUNCTION_DECL
6424 || ! DECL_BUILT_IN (fndecl)
6425 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6426 return END_BUILTINS;
6428 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6429 init_const_call_expr_arg_iterator (t, &iter);
6430 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6432 /* If a function doesn't take a variable number of arguments,
6433 the last element in the list will have type `void'. */
6434 parmtype = TREE_VALUE (parmlist);
6435 if (VOID_TYPE_P (parmtype))
6437 if (more_const_call_expr_args_p (&iter))
6438 return END_BUILTINS;
6439 return DECL_FUNCTION_CODE (fndecl);
6442 if (! more_const_call_expr_args_p (&iter))
6443 return END_BUILTINS;
6445 arg = next_const_call_expr_arg (&iter);
6446 argtype = TREE_TYPE (arg);
6448 if (SCALAR_FLOAT_TYPE_P (parmtype))
6450 if (! SCALAR_FLOAT_TYPE_P (argtype))
6451 return END_BUILTINS;
6453 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6455 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6456 return END_BUILTINS;
6458 else if (POINTER_TYPE_P (parmtype))
6460 if (! POINTER_TYPE_P (argtype))
6461 return END_BUILTINS;
6463 else if (INTEGRAL_TYPE_P (parmtype))
6465 if (! INTEGRAL_TYPE_P (argtype))
6466 return END_BUILTINS;
6468 else
6469 return END_BUILTINS;
6472 /* Variable-length argument list. */
6473 return DECL_FUNCTION_CODE (fndecl);
6476 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6477 evaluate to a constant. */
6479 static tree
6480 fold_builtin_constant_p (tree arg)
6482 /* We return 1 for a numeric type that's known to be a constant
6483 value at compile-time or for an aggregate type that's a
6484 literal constant. */
6485 STRIP_NOPS (arg);
6487 /* If we know this is a constant, emit the constant of one. */
6488 if (CONSTANT_CLASS_P (arg)
6489 || (TREE_CODE (arg) == CONSTRUCTOR
6490 && TREE_CONSTANT (arg)))
6491 return integer_one_node;
6492 if (TREE_CODE (arg) == ADDR_EXPR)
6494 tree op = TREE_OPERAND (arg, 0);
6495 if (TREE_CODE (op) == STRING_CST
6496 || (TREE_CODE (op) == ARRAY_REF
6497 && integer_zerop (TREE_OPERAND (op, 1))
6498 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6499 return integer_one_node;
6502 /* If this expression has side effects, show we don't know it to be a
6503 constant. Likewise if it's a pointer or aggregate type since in
6504 those case we only want literals, since those are only optimized
6505 when generating RTL, not later.
6506 And finally, if we are compiling an initializer, not code, we
6507 need to return a definite result now; there's not going to be any
6508 more optimization done. */
6509 if (TREE_SIDE_EFFECTS (arg)
6510 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6511 || POINTER_TYPE_P (TREE_TYPE (arg))
6512 || cfun == 0
6513 || folding_initializer)
6514 return integer_zero_node;
6516 return NULL_TREE;
6519 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6520 return it as a truthvalue. */
6522 static tree
6523 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6525 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6527 fn = built_in_decls[BUILT_IN_EXPECT];
6528 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6529 ret_type = TREE_TYPE (TREE_TYPE (fn));
6530 pred_type = TREE_VALUE (arg_types);
6531 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6533 pred = fold_convert_loc (loc, pred_type, pred);
6534 expected = fold_convert_loc (loc, expected_type, expected);
6535 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6537 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6538 build_int_cst (ret_type, 0));
6541 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6542 NULL_TREE if no simplification is possible. */
6544 static tree
6545 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6547 tree inner, fndecl;
6548 enum tree_code code;
6550 /* If this is a builtin_expect within a builtin_expect keep the
6551 inner one. See through a comparison against a constant. It
6552 might have been added to create a thruthvalue. */
6553 inner = arg0;
6554 if (COMPARISON_CLASS_P (inner)
6555 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6556 inner = TREE_OPERAND (inner, 0);
6558 if (TREE_CODE (inner) == CALL_EXPR
6559 && (fndecl = get_callee_fndecl (inner))
6560 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6561 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6562 return arg0;
6564 /* Distribute the expected value over short-circuiting operators.
6565 See through the cast from truthvalue_type_node to long. */
6566 inner = arg0;
6567 while (TREE_CODE (inner) == NOP_EXPR
6568 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6569 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6570 inner = TREE_OPERAND (inner, 0);
6572 code = TREE_CODE (inner);
6573 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6575 tree op0 = TREE_OPERAND (inner, 0);
6576 tree op1 = TREE_OPERAND (inner, 1);
6578 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6579 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6580 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6582 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6585 /* If the argument isn't invariant then there's nothing else we can do. */
6586 if (!TREE_CONSTANT (arg0))
6587 return NULL_TREE;
6589 /* If we expect that a comparison against the argument will fold to
6590 a constant return the constant. In practice, this means a true
6591 constant or the address of a non-weak symbol. */
6592 inner = arg0;
6593 STRIP_NOPS (inner);
6594 if (TREE_CODE (inner) == ADDR_EXPR)
6598 inner = TREE_OPERAND (inner, 0);
6600 while (TREE_CODE (inner) == COMPONENT_REF
6601 || TREE_CODE (inner) == ARRAY_REF);
6602 if ((TREE_CODE (inner) == VAR_DECL
6603 || TREE_CODE (inner) == FUNCTION_DECL)
6604 && DECL_WEAK (inner))
6605 return NULL_TREE;
6608 /* Otherwise, ARG0 already has the proper type for the return value. */
6609 return arg0;
6612 /* Fold a call to __builtin_classify_type with argument ARG. */
6614 static tree
6615 fold_builtin_classify_type (tree arg)
6617 if (arg == 0)
6618 return build_int_cst (NULL_TREE, no_type_class);
6620 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6623 /* Fold a call to __builtin_strlen with argument ARG. */
6625 static tree
6626 fold_builtin_strlen (location_t loc, tree type, tree arg)
6628 if (!validate_arg (arg, POINTER_TYPE))
6629 return NULL_TREE;
6630 else
6632 tree len = c_strlen (arg, 0);
6634 if (len)
6635 return fold_convert_loc (loc, type, len);
6637 return NULL_TREE;
6641 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6643 static tree
6644 fold_builtin_inf (location_t loc, tree type, int warn)
6646 REAL_VALUE_TYPE real;
6648 /* __builtin_inff is intended to be usable to define INFINITY on all
6649 targets. If an infinity is not available, INFINITY expands "to a
6650 positive constant of type float that overflows at translation
6651 time", footnote "In this case, using INFINITY will violate the
6652 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6653 Thus we pedwarn to ensure this constraint violation is
6654 diagnosed. */
6655 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6656 pedwarn (loc, 0, "target format does not support infinity");
6658 real_inf (&real);
6659 return build_real (type, real);
6662 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6664 static tree
6665 fold_builtin_nan (tree arg, tree type, int quiet)
6667 REAL_VALUE_TYPE real;
6668 const char *str;
6670 if (!validate_arg (arg, POINTER_TYPE))
6671 return NULL_TREE;
6672 str = c_getstr (arg);
6673 if (!str)
6674 return NULL_TREE;
6676 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6677 return NULL_TREE;
6679 return build_real (type, real);
6682 /* Return true if the floating point expression T has an integer value.
6683 We also allow +Inf, -Inf and NaN to be considered integer values. */
6685 static bool
6686 integer_valued_real_p (tree t)
6688 switch (TREE_CODE (t))
6690 case FLOAT_EXPR:
6691 return true;
6693 case ABS_EXPR:
6694 case SAVE_EXPR:
6695 return integer_valued_real_p (TREE_OPERAND (t, 0));
6697 case COMPOUND_EXPR:
6698 case MODIFY_EXPR:
6699 case BIND_EXPR:
6700 return integer_valued_real_p (TREE_OPERAND (t, 1));
6702 case PLUS_EXPR:
6703 case MINUS_EXPR:
6704 case MULT_EXPR:
6705 case MIN_EXPR:
6706 case MAX_EXPR:
6707 return integer_valued_real_p (TREE_OPERAND (t, 0))
6708 && integer_valued_real_p (TREE_OPERAND (t, 1));
6710 case COND_EXPR:
6711 return integer_valued_real_p (TREE_OPERAND (t, 1))
6712 && integer_valued_real_p (TREE_OPERAND (t, 2));
6714 case REAL_CST:
6715 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6717 case NOP_EXPR:
6719 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6720 if (TREE_CODE (type) == INTEGER_TYPE)
6721 return true;
6722 if (TREE_CODE (type) == REAL_TYPE)
6723 return integer_valued_real_p (TREE_OPERAND (t, 0));
6724 break;
6727 case CALL_EXPR:
6728 switch (builtin_mathfn_code (t))
6730 CASE_FLT_FN (BUILT_IN_CEIL):
6731 CASE_FLT_FN (BUILT_IN_FLOOR):
6732 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6733 CASE_FLT_FN (BUILT_IN_RINT):
6734 CASE_FLT_FN (BUILT_IN_ROUND):
6735 CASE_FLT_FN (BUILT_IN_TRUNC):
6736 return true;
6738 CASE_FLT_FN (BUILT_IN_FMIN):
6739 CASE_FLT_FN (BUILT_IN_FMAX):
6740 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6741 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6743 default:
6744 break;
6746 break;
6748 default:
6749 break;
6751 return false;
6754 /* FNDECL is assumed to be a builtin where truncation can be propagated
6755 across (for instance floor((double)f) == (double)floorf (f).
6756 Do the transformation for a call with argument ARG. */
6758 static tree
6759 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6761 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6763 if (!validate_arg (arg, REAL_TYPE))
6764 return NULL_TREE;
6766 /* Integer rounding functions are idempotent. */
6767 if (fcode == builtin_mathfn_code (arg))
6768 return arg;
6770 /* If argument is already integer valued, and we don't need to worry
6771 about setting errno, there's no need to perform rounding. */
6772 if (! flag_errno_math && integer_valued_real_p (arg))
6773 return arg;
6775 if (optimize)
6777 tree arg0 = strip_float_extensions (arg);
6778 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6779 tree newtype = TREE_TYPE (arg0);
6780 tree decl;
6782 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6783 && (decl = mathfn_built_in (newtype, fcode)))
6784 return fold_convert_loc (loc, ftype,
6785 build_call_expr_loc (loc, decl, 1,
6786 fold_convert_loc (loc,
6787 newtype,
6788 arg0)));
6790 return NULL_TREE;
6793 /* FNDECL is assumed to be builtin which can narrow the FP type of
6794 the argument, for instance lround((double)f) -> lroundf (f).
6795 Do the transformation for a call with argument ARG. */
6797 static tree
6798 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6800 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6802 if (!validate_arg (arg, REAL_TYPE))
6803 return NULL_TREE;
6805 /* If argument is already integer valued, and we don't need to worry
6806 about setting errno, there's no need to perform rounding. */
6807 if (! flag_errno_math && integer_valued_real_p (arg))
6808 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6809 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6811 if (optimize)
6813 tree ftype = TREE_TYPE (arg);
6814 tree arg0 = strip_float_extensions (arg);
6815 tree newtype = TREE_TYPE (arg0);
6816 tree decl;
6818 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6819 && (decl = mathfn_built_in (newtype, fcode)))
6820 return build_call_expr_loc (loc, decl, 1,
6821 fold_convert_loc (loc, newtype, arg0));
6824 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6825 sizeof (long long) == sizeof (long). */
6826 if (TYPE_PRECISION (long_long_integer_type_node)
6827 == TYPE_PRECISION (long_integer_type_node))
6829 tree newfn = NULL_TREE;
6830 switch (fcode)
6832 CASE_FLT_FN (BUILT_IN_LLCEIL):
6833 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6834 break;
6836 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6837 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6838 break;
6840 CASE_FLT_FN (BUILT_IN_LLROUND):
6841 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6842 break;
6844 CASE_FLT_FN (BUILT_IN_LLRINT):
6845 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6846 break;
6848 default:
6849 break;
6852 if (newfn)
6854 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6855 return fold_convert_loc (loc,
6856 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6860 return NULL_TREE;
6863 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6864 return type. Return NULL_TREE if no simplification can be made. */
6866 static tree
6867 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6869 tree res;
6871 if (!validate_arg (arg, COMPLEX_TYPE)
6872 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6873 return NULL_TREE;
6875 /* Calculate the result when the argument is a constant. */
6876 if (TREE_CODE (arg) == COMPLEX_CST
6877 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6878 type, mpfr_hypot)))
6879 return res;
6881 if (TREE_CODE (arg) == COMPLEX_EXPR)
6883 tree real = TREE_OPERAND (arg, 0);
6884 tree imag = TREE_OPERAND (arg, 1);
6886 /* If either part is zero, cabs is fabs of the other. */
6887 if (real_zerop (real))
6888 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6889 if (real_zerop (imag))
6890 return fold_build1_loc (loc, ABS_EXPR, type, real);
6892 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6893 if (flag_unsafe_math_optimizations
6894 && operand_equal_p (real, imag, OEP_PURE_SAME))
6896 const REAL_VALUE_TYPE sqrt2_trunc
6897 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6898 STRIP_NOPS (real);
6899 return fold_build2_loc (loc, MULT_EXPR, type,
6900 fold_build1_loc (loc, ABS_EXPR, type, real),
6901 build_real (type, sqrt2_trunc));
6905 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6906 if (TREE_CODE (arg) == NEGATE_EXPR
6907 || TREE_CODE (arg) == CONJ_EXPR)
6908 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6910 /* Don't do this when optimizing for size. */
6911 if (flag_unsafe_math_optimizations
6912 && optimize && optimize_function_for_speed_p (cfun))
6914 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6916 if (sqrtfn != NULL_TREE)
6918 tree rpart, ipart, result;
6920 arg = builtin_save_expr (arg);
6922 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6923 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6925 rpart = builtin_save_expr (rpart);
6926 ipart = builtin_save_expr (ipart);
6928 result = fold_build2_loc (loc, PLUS_EXPR, type,
6929 fold_build2_loc (loc, MULT_EXPR, type,
6930 rpart, rpart),
6931 fold_build2_loc (loc, MULT_EXPR, type,
6932 ipart, ipart));
6934 return build_call_expr_loc (loc, sqrtfn, 1, result);
6938 return NULL_TREE;
6941 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6942 Return NULL_TREE if no simplification can be made. */
6944 static tree
6945 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6948 enum built_in_function fcode;
6949 tree res;
6951 if (!validate_arg (arg, REAL_TYPE))
6952 return NULL_TREE;
6954 /* Calculate the result when the argument is a constant. */
6955 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6956 return res;
6958 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6959 fcode = builtin_mathfn_code (arg);
6960 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6962 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6963 arg = fold_build2_loc (loc, MULT_EXPR, type,
6964 CALL_EXPR_ARG (arg, 0),
6965 build_real (type, dconsthalf));
6966 return build_call_expr_loc (loc, expfn, 1, arg);
6969 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6970 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6972 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6974 if (powfn)
6976 tree arg0 = CALL_EXPR_ARG (arg, 0);
6977 tree tree_root;
6978 /* The inner root was either sqrt or cbrt. */
6979 /* This was a conditional expression but it triggered a bug
6980 in Sun C 5.5. */
6981 REAL_VALUE_TYPE dconstroot;
6982 if (BUILTIN_SQRT_P (fcode))
6983 dconstroot = dconsthalf;
6984 else
6985 dconstroot = dconst_third ();
6987 /* Adjust for the outer root. */
6988 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6989 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6990 tree_root = build_real (type, dconstroot);
6991 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6995 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6996 if (flag_unsafe_math_optimizations
6997 && (fcode == BUILT_IN_POW
6998 || fcode == BUILT_IN_POWF
6999 || fcode == BUILT_IN_POWL))
7001 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7002 tree arg0 = CALL_EXPR_ARG (arg, 0);
7003 tree arg1 = CALL_EXPR_ARG (arg, 1);
7004 tree narg1;
7005 if (!tree_expr_nonnegative_p (arg0))
7006 arg0 = build1 (ABS_EXPR, type, arg0);
7007 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7008 build_real (type, dconsthalf));
7009 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7012 return NULL_TREE;
7015 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7016 Return NULL_TREE if no simplification can be made. */
7018 static tree
7019 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7021 const enum built_in_function fcode = builtin_mathfn_code (arg);
7022 tree res;
7024 if (!validate_arg (arg, REAL_TYPE))
7025 return NULL_TREE;
7027 /* Calculate the result when the argument is a constant. */
7028 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7029 return res;
7031 if (flag_unsafe_math_optimizations)
7033 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7034 if (BUILTIN_EXPONENT_P (fcode))
7036 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7037 const REAL_VALUE_TYPE third_trunc =
7038 real_value_truncate (TYPE_MODE (type), dconst_third ());
7039 arg = fold_build2_loc (loc, MULT_EXPR, type,
7040 CALL_EXPR_ARG (arg, 0),
7041 build_real (type, third_trunc));
7042 return build_call_expr_loc (loc, expfn, 1, arg);
7045 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7046 if (BUILTIN_SQRT_P (fcode))
7048 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7050 if (powfn)
7052 tree arg0 = CALL_EXPR_ARG (arg, 0);
7053 tree tree_root;
7054 REAL_VALUE_TYPE dconstroot = dconst_third ();
7056 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7057 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7058 tree_root = build_real (type, dconstroot);
7059 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7063 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7064 if (BUILTIN_CBRT_P (fcode))
7066 tree arg0 = CALL_EXPR_ARG (arg, 0);
7067 if (tree_expr_nonnegative_p (arg0))
7069 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7071 if (powfn)
7073 tree tree_root;
7074 REAL_VALUE_TYPE dconstroot;
7076 real_arithmetic (&dconstroot, MULT_EXPR,
7077 dconst_third_ptr (), dconst_third_ptr ());
7078 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7079 tree_root = build_real (type, dconstroot);
7080 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7085 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7086 if (fcode == BUILT_IN_POW
7087 || fcode == BUILT_IN_POWF
7088 || fcode == BUILT_IN_POWL)
7090 tree arg00 = CALL_EXPR_ARG (arg, 0);
7091 tree arg01 = CALL_EXPR_ARG (arg, 1);
7092 if (tree_expr_nonnegative_p (arg00))
7094 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7095 const REAL_VALUE_TYPE dconstroot
7096 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7097 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7098 build_real (type, dconstroot));
7099 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7103 return NULL_TREE;
7106 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7107 TYPE is the type of the return value. Return NULL_TREE if no
7108 simplification can be made. */
7110 static tree
7111 fold_builtin_cos (location_t loc,
7112 tree arg, tree type, tree fndecl)
7114 tree res, narg;
7116 if (!validate_arg (arg, REAL_TYPE))
7117 return NULL_TREE;
7119 /* Calculate the result when the argument is a constant. */
7120 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7121 return res;
7123 /* Optimize cos(-x) into cos (x). */
7124 if ((narg = fold_strip_sign_ops (arg)))
7125 return build_call_expr_loc (loc, fndecl, 1, narg);
7127 return NULL_TREE;
7130 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7131 Return NULL_TREE if no simplification can be made. */
7133 static tree
7134 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7136 if (validate_arg (arg, REAL_TYPE))
7138 tree res, narg;
7140 /* Calculate the result when the argument is a constant. */
7141 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7142 return res;
7144 /* Optimize cosh(-x) into cosh (x). */
7145 if ((narg = fold_strip_sign_ops (arg)))
7146 return build_call_expr_loc (loc, fndecl, 1, narg);
7149 return NULL_TREE;
7152 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7153 argument ARG. TYPE is the type of the return value. Return
7154 NULL_TREE if no simplification can be made. */
7156 static tree
7157 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7158 bool hyper)
7160 if (validate_arg (arg, COMPLEX_TYPE)
7161 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7163 tree tmp;
7165 /* Calculate the result when the argument is a constant. */
7166 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7167 return tmp;
7169 /* Optimize fn(-x) into fn(x). */
7170 if ((tmp = fold_strip_sign_ops (arg)))
7171 return build_call_expr_loc (loc, fndecl, 1, tmp);
7174 return NULL_TREE;
7177 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7178 Return NULL_TREE if no simplification can be made. */
7180 static tree
7181 fold_builtin_tan (tree arg, tree type)
7183 enum built_in_function fcode;
7184 tree res;
7186 if (!validate_arg (arg, REAL_TYPE))
7187 return NULL_TREE;
7189 /* Calculate the result when the argument is a constant. */
7190 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7191 return res;
7193 /* Optimize tan(atan(x)) = x. */
7194 fcode = builtin_mathfn_code (arg);
7195 if (flag_unsafe_math_optimizations
7196 && (fcode == BUILT_IN_ATAN
7197 || fcode == BUILT_IN_ATANF
7198 || fcode == BUILT_IN_ATANL))
7199 return CALL_EXPR_ARG (arg, 0);
7201 return NULL_TREE;
7204 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7205 NULL_TREE if no simplification can be made. */
7207 static tree
7208 fold_builtin_sincos (location_t loc,
7209 tree arg0, tree arg1, tree arg2)
7211 tree type;
7212 tree res, fn, call;
7214 if (!validate_arg (arg0, REAL_TYPE)
7215 || !validate_arg (arg1, POINTER_TYPE)
7216 || !validate_arg (arg2, POINTER_TYPE))
7217 return NULL_TREE;
7219 type = TREE_TYPE (arg0);
7221 /* Calculate the result when the argument is a constant. */
7222 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7223 return res;
7225 /* Canonicalize sincos to cexpi. */
7226 if (!TARGET_C99_FUNCTIONS)
7227 return NULL_TREE;
7228 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7229 if (!fn)
7230 return NULL_TREE;
7232 call = build_call_expr_loc (loc, fn, 1, arg0);
7233 call = builtin_save_expr (call);
7235 return build2 (COMPOUND_EXPR, void_type_node,
7236 build2 (MODIFY_EXPR, void_type_node,
7237 build_fold_indirect_ref_loc (loc, arg1),
7238 build1 (IMAGPART_EXPR, type, call)),
7239 build2 (MODIFY_EXPR, void_type_node,
7240 build_fold_indirect_ref_loc (loc, arg2),
7241 build1 (REALPART_EXPR, type, call)));
7244 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7245 NULL_TREE if no simplification can be made. */
7247 static tree
7248 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7250 tree rtype;
7251 tree realp, imagp, ifn;
7252 tree res;
7254 if (!validate_arg (arg0, COMPLEX_TYPE)
7255 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7256 return NULL_TREE;
7258 /* Calculate the result when the argument is a constant. */
7259 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7260 return res;
7262 rtype = TREE_TYPE (TREE_TYPE (arg0));
7264 /* In case we can figure out the real part of arg0 and it is constant zero
7265 fold to cexpi. */
7266 if (!TARGET_C99_FUNCTIONS)
7267 return NULL_TREE;
7268 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7269 if (!ifn)
7270 return NULL_TREE;
7272 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7273 && real_zerop (realp))
7275 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7276 return build_call_expr_loc (loc, ifn, 1, narg);
7279 /* In case we can easily decompose real and imaginary parts split cexp
7280 to exp (r) * cexpi (i). */
7281 if (flag_unsafe_math_optimizations
7282 && realp)
7284 tree rfn, rcall, icall;
7286 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7287 if (!rfn)
7288 return NULL_TREE;
7290 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7291 if (!imagp)
7292 return NULL_TREE;
7294 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7295 icall = builtin_save_expr (icall);
7296 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7297 rcall = builtin_save_expr (rcall);
7298 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7299 fold_build2_loc (loc, MULT_EXPR, rtype,
7300 rcall,
7301 fold_build1_loc (loc, REALPART_EXPR,
7302 rtype, icall)),
7303 fold_build2_loc (loc, MULT_EXPR, rtype,
7304 rcall,
7305 fold_build1_loc (loc, IMAGPART_EXPR,
7306 rtype, icall)));
7309 return NULL_TREE;
7312 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7313 Return NULL_TREE if no simplification can be made. */
7315 static tree
7316 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7318 if (!validate_arg (arg, REAL_TYPE))
7319 return NULL_TREE;
7321 /* Optimize trunc of constant value. */
7322 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7324 REAL_VALUE_TYPE r, x;
7325 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7327 x = TREE_REAL_CST (arg);
7328 real_trunc (&r, TYPE_MODE (type), &x);
7329 return build_real (type, r);
7332 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7335 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7336 Return NULL_TREE if no simplification can be made. */
7338 static tree
7339 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7341 if (!validate_arg (arg, REAL_TYPE))
7342 return NULL_TREE;
7344 /* Optimize floor of constant value. */
7345 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7347 REAL_VALUE_TYPE x;
7349 x = TREE_REAL_CST (arg);
7350 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7352 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7353 REAL_VALUE_TYPE r;
7355 real_floor (&r, TYPE_MODE (type), &x);
7356 return build_real (type, r);
7360 /* Fold floor (x) where x is nonnegative to trunc (x). */
7361 if (tree_expr_nonnegative_p (arg))
7363 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7364 if (truncfn)
7365 return build_call_expr_loc (loc, truncfn, 1, arg);
7368 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7371 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7372 Return NULL_TREE if no simplification can be made. */
7374 static tree
7375 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7377 if (!validate_arg (arg, REAL_TYPE))
7378 return NULL_TREE;
7380 /* Optimize ceil of constant value. */
7381 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7383 REAL_VALUE_TYPE x;
7385 x = TREE_REAL_CST (arg);
7386 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7388 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7389 REAL_VALUE_TYPE r;
7391 real_ceil (&r, TYPE_MODE (type), &x);
7392 return build_real (type, r);
7396 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7399 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7400 Return NULL_TREE if no simplification can be made. */
7402 static tree
7403 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7405 if (!validate_arg (arg, REAL_TYPE))
7406 return NULL_TREE;
7408 /* Optimize round of constant value. */
7409 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7411 REAL_VALUE_TYPE x;
7413 x = TREE_REAL_CST (arg);
7414 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7416 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7417 REAL_VALUE_TYPE r;
7419 real_round (&r, TYPE_MODE (type), &x);
7420 return build_real (type, r);
7424 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7427 /* Fold function call to builtin lround, lroundf or lroundl (or the
7428 corresponding long long versions) and other rounding functions. ARG
7429 is the argument to the call. Return NULL_TREE if no simplification
7430 can be made. */
7432 static tree
7433 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7435 if (!validate_arg (arg, REAL_TYPE))
7436 return NULL_TREE;
7438 /* Optimize lround of constant value. */
7439 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7441 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7443 if (real_isfinite (&x))
7445 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7446 tree ftype = TREE_TYPE (arg);
7447 unsigned HOST_WIDE_INT lo2;
7448 HOST_WIDE_INT hi, lo;
7449 REAL_VALUE_TYPE r;
7451 switch (DECL_FUNCTION_CODE (fndecl))
7453 CASE_FLT_FN (BUILT_IN_LFLOOR):
7454 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7455 real_floor (&r, TYPE_MODE (ftype), &x);
7456 break;
7458 CASE_FLT_FN (BUILT_IN_LCEIL):
7459 CASE_FLT_FN (BUILT_IN_LLCEIL):
7460 real_ceil (&r, TYPE_MODE (ftype), &x);
7461 break;
7463 CASE_FLT_FN (BUILT_IN_LROUND):
7464 CASE_FLT_FN (BUILT_IN_LLROUND):
7465 real_round (&r, TYPE_MODE (ftype), &x);
7466 break;
7468 default:
7469 gcc_unreachable ();
7472 REAL_VALUE_TO_INT (&lo, &hi, r);
7473 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7474 return build_int_cst_wide (itype, lo2, hi);
7478 switch (DECL_FUNCTION_CODE (fndecl))
7480 CASE_FLT_FN (BUILT_IN_LFLOOR):
7481 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7482 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7483 if (tree_expr_nonnegative_p (arg))
7484 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7485 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7486 break;
7487 default:;
7490 return fold_fixed_mathfn (loc, fndecl, arg);
7493 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7494 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7495 the argument to the call. Return NULL_TREE if no simplification can
7496 be made. */
7498 static tree
7499 fold_builtin_bitop (tree fndecl, tree arg)
7501 if (!validate_arg (arg, INTEGER_TYPE))
7502 return NULL_TREE;
7504 /* Optimize for constant argument. */
7505 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7507 HOST_WIDE_INT hi, width, result;
7508 unsigned HOST_WIDE_INT lo;
7509 tree type;
7511 type = TREE_TYPE (arg);
7512 width = TYPE_PRECISION (type);
7513 lo = TREE_INT_CST_LOW (arg);
7515 /* Clear all the bits that are beyond the type's precision. */
7516 if (width > HOST_BITS_PER_WIDE_INT)
7518 hi = TREE_INT_CST_HIGH (arg);
7519 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7520 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7522 else
7524 hi = 0;
7525 if (width < HOST_BITS_PER_WIDE_INT)
7526 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7529 switch (DECL_FUNCTION_CODE (fndecl))
7531 CASE_INT_FN (BUILT_IN_FFS):
7532 if (lo != 0)
7533 result = exact_log2 (lo & -lo) + 1;
7534 else if (hi != 0)
7535 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7536 else
7537 result = 0;
7538 break;
7540 CASE_INT_FN (BUILT_IN_CLZ):
7541 if (hi != 0)
7542 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7543 else if (lo != 0)
7544 result = width - floor_log2 (lo) - 1;
7545 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7546 result = width;
7547 break;
7549 CASE_INT_FN (BUILT_IN_CTZ):
7550 if (lo != 0)
7551 result = exact_log2 (lo & -lo);
7552 else if (hi != 0)
7553 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7554 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7555 result = width;
7556 break;
7558 CASE_INT_FN (BUILT_IN_POPCOUNT):
7559 result = 0;
7560 while (lo)
7561 result++, lo &= lo - 1;
7562 while (hi)
7563 result++, hi &= hi - 1;
7564 break;
7566 CASE_INT_FN (BUILT_IN_PARITY):
7567 result = 0;
7568 while (lo)
7569 result++, lo &= lo - 1;
7570 while (hi)
7571 result++, hi &= hi - 1;
7572 result &= 1;
7573 break;
7575 default:
7576 gcc_unreachable ();
7579 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7582 return NULL_TREE;
7585 /* Fold function call to builtin_bswap and the long and long long
7586 variants. Return NULL_TREE if no simplification can be made. */
7587 static tree
7588 fold_builtin_bswap (tree fndecl, tree arg)
7590 if (! validate_arg (arg, INTEGER_TYPE))
7591 return NULL_TREE;
7593 /* Optimize constant value. */
7594 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7596 HOST_WIDE_INT hi, width, r_hi = 0;
7597 unsigned HOST_WIDE_INT lo, r_lo = 0;
7598 tree type;
7600 type = TREE_TYPE (arg);
7601 width = TYPE_PRECISION (type);
7602 lo = TREE_INT_CST_LOW (arg);
7603 hi = TREE_INT_CST_HIGH (arg);
7605 switch (DECL_FUNCTION_CODE (fndecl))
7607 case BUILT_IN_BSWAP32:
7608 case BUILT_IN_BSWAP64:
7610 int s;
7612 for (s = 0; s < width; s += 8)
7614 int d = width - s - 8;
7615 unsigned HOST_WIDE_INT byte;
7617 if (s < HOST_BITS_PER_WIDE_INT)
7618 byte = (lo >> s) & 0xff;
7619 else
7620 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7622 if (d < HOST_BITS_PER_WIDE_INT)
7623 r_lo |= byte << d;
7624 else
7625 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7629 break;
7631 default:
7632 gcc_unreachable ();
7635 if (width < HOST_BITS_PER_WIDE_INT)
7636 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7637 else
7638 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7641 return NULL_TREE;
7644 /* A subroutine of fold_builtin to fold the various logarithmic
7645 functions. Return NULL_TREE if no simplification can me made.
7646 FUNC is the corresponding MPFR logarithm function. */
7648 static tree
7649 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7650 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7652 if (validate_arg (arg, REAL_TYPE))
7654 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7655 tree res;
7656 const enum built_in_function fcode = builtin_mathfn_code (arg);
7658 /* Calculate the result when the argument is a constant. */
7659 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7660 return res;
7662 /* Special case, optimize logN(expN(x)) = x. */
7663 if (flag_unsafe_math_optimizations
7664 && ((func == mpfr_log
7665 && (fcode == BUILT_IN_EXP
7666 || fcode == BUILT_IN_EXPF
7667 || fcode == BUILT_IN_EXPL))
7668 || (func == mpfr_log2
7669 && (fcode == BUILT_IN_EXP2
7670 || fcode == BUILT_IN_EXP2F
7671 || fcode == BUILT_IN_EXP2L))
7672 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7673 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7675 /* Optimize logN(func()) for various exponential functions. We
7676 want to determine the value "x" and the power "exponent" in
7677 order to transform logN(x**exponent) into exponent*logN(x). */
7678 if (flag_unsafe_math_optimizations)
7680 tree exponent = 0, x = 0;
7682 switch (fcode)
7684 CASE_FLT_FN (BUILT_IN_EXP):
7685 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7686 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7687 dconst_e ()));
7688 exponent = CALL_EXPR_ARG (arg, 0);
7689 break;
7690 CASE_FLT_FN (BUILT_IN_EXP2):
7691 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7692 x = build_real (type, dconst2);
7693 exponent = CALL_EXPR_ARG (arg, 0);
7694 break;
7695 CASE_FLT_FN (BUILT_IN_EXP10):
7696 CASE_FLT_FN (BUILT_IN_POW10):
7697 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7699 REAL_VALUE_TYPE dconst10;
7700 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7701 x = build_real (type, dconst10);
7703 exponent = CALL_EXPR_ARG (arg, 0);
7704 break;
7705 CASE_FLT_FN (BUILT_IN_SQRT):
7706 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7707 x = CALL_EXPR_ARG (arg, 0);
7708 exponent = build_real (type, dconsthalf);
7709 break;
7710 CASE_FLT_FN (BUILT_IN_CBRT):
7711 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7712 x = CALL_EXPR_ARG (arg, 0);
7713 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7714 dconst_third ()));
7715 break;
7716 CASE_FLT_FN (BUILT_IN_POW):
7717 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7718 x = CALL_EXPR_ARG (arg, 0);
7719 exponent = CALL_EXPR_ARG (arg, 1);
7720 break;
7721 default:
7722 break;
7725 /* Now perform the optimization. */
7726 if (x && exponent)
7728 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7729 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7734 return NULL_TREE;
7737 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7738 NULL_TREE if no simplification can be made. */
7740 static tree
7741 fold_builtin_hypot (location_t loc, tree fndecl,
7742 tree arg0, tree arg1, tree type)
7744 tree res, narg0, narg1;
7746 if (!validate_arg (arg0, REAL_TYPE)
7747 || !validate_arg (arg1, REAL_TYPE))
7748 return NULL_TREE;
7750 /* Calculate the result when the argument is a constant. */
7751 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7752 return res;
7754 /* If either argument to hypot has a negate or abs, strip that off.
7755 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7756 narg0 = fold_strip_sign_ops (arg0);
7757 narg1 = fold_strip_sign_ops (arg1);
7758 if (narg0 || narg1)
7760 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7761 narg1 ? narg1 : arg1);
7764 /* If either argument is zero, hypot is fabs of the other. */
7765 if (real_zerop (arg0))
7766 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7767 else if (real_zerop (arg1))
7768 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7770 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7771 if (flag_unsafe_math_optimizations
7772 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7774 const REAL_VALUE_TYPE sqrt2_trunc
7775 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7776 return fold_build2_loc (loc, MULT_EXPR, type,
7777 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7778 build_real (type, sqrt2_trunc));
7781 return NULL_TREE;
7785 /* Fold a builtin function call to pow, powf, or powl. Return
7786 NULL_TREE if no simplification can be made. */
7787 static tree
7788 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7790 tree res;
7792 if (!validate_arg (arg0, REAL_TYPE)
7793 || !validate_arg (arg1, REAL_TYPE))
7794 return NULL_TREE;
7796 /* Calculate the result when the argument is a constant. */
7797 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7798 return res;
7800 /* Optimize pow(1.0,y) = 1.0. */
7801 if (real_onep (arg0))
7802 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7804 if (TREE_CODE (arg1) == REAL_CST
7805 && !TREE_OVERFLOW (arg1))
7807 REAL_VALUE_TYPE cint;
7808 REAL_VALUE_TYPE c;
7809 HOST_WIDE_INT n;
7811 c = TREE_REAL_CST (arg1);
7813 /* Optimize pow(x,0.0) = 1.0. */
7814 if (REAL_VALUES_EQUAL (c, dconst0))
7815 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7816 arg0);
7818 /* Optimize pow(x,1.0) = x. */
7819 if (REAL_VALUES_EQUAL (c, dconst1))
7820 return arg0;
7822 /* Optimize pow(x,-1.0) = 1.0/x. */
7823 if (REAL_VALUES_EQUAL (c, dconstm1))
7824 return fold_build2_loc (loc, RDIV_EXPR, type,
7825 build_real (type, dconst1), arg0);
7827 /* Optimize pow(x,0.5) = sqrt(x). */
7828 if (flag_unsafe_math_optimizations
7829 && REAL_VALUES_EQUAL (c, dconsthalf))
7831 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7833 if (sqrtfn != NULL_TREE)
7834 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7837 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7838 if (flag_unsafe_math_optimizations)
7840 const REAL_VALUE_TYPE dconstroot
7841 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7843 if (REAL_VALUES_EQUAL (c, dconstroot))
7845 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7846 if (cbrtfn != NULL_TREE)
7847 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7851 /* Check for an integer exponent. */
7852 n = real_to_integer (&c);
7853 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7854 if (real_identical (&c, &cint))
7856 /* Attempt to evaluate pow at compile-time, unless this should
7857 raise an exception. */
7858 if (TREE_CODE (arg0) == REAL_CST
7859 && !TREE_OVERFLOW (arg0)
7860 && (n > 0
7861 || (!flag_trapping_math && !flag_errno_math)
7862 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7864 REAL_VALUE_TYPE x;
7865 bool inexact;
7867 x = TREE_REAL_CST (arg0);
7868 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7869 if (flag_unsafe_math_optimizations || !inexact)
7870 return build_real (type, x);
7873 /* Strip sign ops from even integer powers. */
7874 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7876 tree narg0 = fold_strip_sign_ops (arg0);
7877 if (narg0)
7878 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7883 if (flag_unsafe_math_optimizations)
7885 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7887 /* Optimize pow(expN(x),y) = expN(x*y). */
7888 if (BUILTIN_EXPONENT_P (fcode))
7890 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7891 tree arg = CALL_EXPR_ARG (arg0, 0);
7892 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7893 return build_call_expr_loc (loc, expfn, 1, arg);
7896 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7897 if (BUILTIN_SQRT_P (fcode))
7899 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7900 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7901 build_real (type, dconsthalf));
7902 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7905 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7906 if (BUILTIN_CBRT_P (fcode))
7908 tree arg = CALL_EXPR_ARG (arg0, 0);
7909 if (tree_expr_nonnegative_p (arg))
7911 const REAL_VALUE_TYPE dconstroot
7912 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7913 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7914 build_real (type, dconstroot));
7915 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7919 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7920 if (fcode == BUILT_IN_POW
7921 || fcode == BUILT_IN_POWF
7922 || fcode == BUILT_IN_POWL)
7924 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7925 if (tree_expr_nonnegative_p (arg00))
7927 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7928 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7929 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7934 return NULL_TREE;
7937 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7938 Return NULL_TREE if no simplification can be made. */
7939 static tree
7940 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7941 tree arg0, tree arg1, tree type)
7943 if (!validate_arg (arg0, REAL_TYPE)
7944 || !validate_arg (arg1, INTEGER_TYPE))
7945 return NULL_TREE;
7947 /* Optimize pow(1.0,y) = 1.0. */
7948 if (real_onep (arg0))
7949 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7951 if (host_integerp (arg1, 0))
7953 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7955 /* Evaluate powi at compile-time. */
7956 if (TREE_CODE (arg0) == REAL_CST
7957 && !TREE_OVERFLOW (arg0))
7959 REAL_VALUE_TYPE x;
7960 x = TREE_REAL_CST (arg0);
7961 real_powi (&x, TYPE_MODE (type), &x, c);
7962 return build_real (type, x);
7965 /* Optimize pow(x,0) = 1.0. */
7966 if (c == 0)
7967 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7968 arg0);
7970 /* Optimize pow(x,1) = x. */
7971 if (c == 1)
7972 return arg0;
7974 /* Optimize pow(x,-1) = 1.0/x. */
7975 if (c == -1)
7976 return fold_build2_loc (loc, RDIV_EXPR, type,
7977 build_real (type, dconst1), arg0);
7980 return NULL_TREE;
7983 /* A subroutine of fold_builtin to fold the various exponent
7984 functions. Return NULL_TREE if no simplification can be made.
7985 FUNC is the corresponding MPFR exponent function. */
7987 static tree
7988 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7989 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7991 if (validate_arg (arg, REAL_TYPE))
7993 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7994 tree res;
7996 /* Calculate the result when the argument is a constant. */
7997 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7998 return res;
8000 /* Optimize expN(logN(x)) = x. */
8001 if (flag_unsafe_math_optimizations)
8003 const enum built_in_function fcode = builtin_mathfn_code (arg);
8005 if ((func == mpfr_exp
8006 && (fcode == BUILT_IN_LOG
8007 || fcode == BUILT_IN_LOGF
8008 || fcode == BUILT_IN_LOGL))
8009 || (func == mpfr_exp2
8010 && (fcode == BUILT_IN_LOG2
8011 || fcode == BUILT_IN_LOG2F
8012 || fcode == BUILT_IN_LOG2L))
8013 || (func == mpfr_exp10
8014 && (fcode == BUILT_IN_LOG10
8015 || fcode == BUILT_IN_LOG10F
8016 || fcode == BUILT_IN_LOG10L)))
8017 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8021 return NULL_TREE;
8024 /* Return true if VAR is a VAR_DECL or a component thereof. */
8026 static bool
8027 var_decl_component_p (tree var)
8029 tree inner = var;
8030 while (handled_component_p (inner))
8031 inner = TREE_OPERAND (inner, 0);
8032 return SSA_VAR_P (inner);
8035 /* Fold function call to builtin memset. Return
8036 NULL_TREE if no simplification can be made. */
8038 static tree
8039 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8040 tree type, bool ignore)
8042 tree var, ret, etype;
8043 unsigned HOST_WIDE_INT length, cval;
8045 if (! validate_arg (dest, POINTER_TYPE)
8046 || ! validate_arg (c, INTEGER_TYPE)
8047 || ! validate_arg (len, INTEGER_TYPE))
8048 return NULL_TREE;
8050 if (! host_integerp (len, 1))
8051 return NULL_TREE;
8053 /* If the LEN parameter is zero, return DEST. */
8054 if (integer_zerop (len))
8055 return omit_one_operand_loc (loc, type, dest, c);
8057 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8058 return NULL_TREE;
8060 var = dest;
8061 STRIP_NOPS (var);
8062 if (TREE_CODE (var) != ADDR_EXPR)
8063 return NULL_TREE;
8065 var = TREE_OPERAND (var, 0);
8066 if (TREE_THIS_VOLATILE (var))
8067 return NULL_TREE;
8069 etype = TREE_TYPE (var);
8070 if (TREE_CODE (etype) == ARRAY_TYPE)
8071 etype = TREE_TYPE (etype);
8073 if (!INTEGRAL_TYPE_P (etype)
8074 && !POINTER_TYPE_P (etype))
8075 return NULL_TREE;
8077 if (! var_decl_component_p (var))
8078 return NULL_TREE;
8080 length = tree_low_cst (len, 1);
8081 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8082 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8083 < (int) length)
8084 return NULL_TREE;
8086 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8087 return NULL_TREE;
8089 if (integer_zerop (c))
8090 cval = 0;
8091 else
8093 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8094 return NULL_TREE;
8096 cval = tree_low_cst (c, 1);
8097 cval &= 0xff;
8098 cval |= cval << 8;
8099 cval |= cval << 16;
8100 cval |= (cval << 31) << 1;
8103 ret = build_int_cst_type (etype, cval);
8104 var = build_fold_indirect_ref_loc (loc,
8105 fold_convert_loc (loc,
8106 build_pointer_type (etype),
8107 dest));
8108 ret = build2 (MODIFY_EXPR, etype, var, ret);
8109 if (ignore)
8110 return ret;
8112 return omit_one_operand_loc (loc, type, dest, ret);
8115 /* Fold function call to builtin memset. Return
8116 NULL_TREE if no simplification can be made. */
8118 static tree
8119 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8121 if (! validate_arg (dest, POINTER_TYPE)
8122 || ! validate_arg (size, INTEGER_TYPE))
8123 return NULL_TREE;
8125 if (!ignore)
8126 return NULL_TREE;
8128 /* New argument list transforming bzero(ptr x, int y) to
8129 memset(ptr x, int 0, size_t y). This is done this way
8130 so that if it isn't expanded inline, we fallback to
8131 calling bzero instead of memset. */
8133 return fold_builtin_memset (loc, dest, integer_zero_node,
8134 fold_convert_loc (loc, sizetype, size),
8135 void_type_node, ignore);
8138 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8139 NULL_TREE if no simplification can be made.
8140 If ENDP is 0, return DEST (like memcpy).
8141 If ENDP is 1, return DEST+LEN (like mempcpy).
8142 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8143 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8144 (memmove). */
8146 static tree
8147 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8148 tree len, tree type, bool ignore, int endp)
8150 tree destvar, srcvar, expr;
8152 if (! validate_arg (dest, POINTER_TYPE)
8153 || ! validate_arg (src, POINTER_TYPE)
8154 || ! validate_arg (len, INTEGER_TYPE))
8155 return NULL_TREE;
8157 /* If the LEN parameter is zero, return DEST. */
8158 if (integer_zerop (len))
8159 return omit_one_operand_loc (loc, type, dest, src);
8161 /* If SRC and DEST are the same (and not volatile), return
8162 DEST{,+LEN,+LEN-1}. */
8163 if (operand_equal_p (src, dest, 0))
8164 expr = len;
8165 else
8167 tree srctype, desttype;
8168 int src_align, dest_align;
8170 if (endp == 3)
8172 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8173 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8175 /* Both DEST and SRC must be pointer types.
8176 ??? This is what old code did. Is the testing for pointer types
8177 really mandatory?
8179 If either SRC is readonly or length is 1, we can use memcpy. */
8180 if (!dest_align || !src_align)
8181 return NULL_TREE;
8182 if (readonly_data_expr (src)
8183 || (host_integerp (len, 1)
8184 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8185 >= tree_low_cst (len, 1))))
8187 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8188 if (!fn)
8189 return NULL_TREE;
8190 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8193 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8194 srcvar = build_fold_indirect_ref_loc (loc, src);
8195 destvar = build_fold_indirect_ref_loc (loc, dest);
8196 if (srcvar
8197 && !TREE_THIS_VOLATILE (srcvar)
8198 && destvar
8199 && !TREE_THIS_VOLATILE (destvar))
8201 tree src_base, dest_base, fn;
8202 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8203 HOST_WIDE_INT size = -1;
8204 HOST_WIDE_INT maxsize = -1;
8206 src_base = srcvar;
8207 if (handled_component_p (src_base))
8208 src_base = get_ref_base_and_extent (src_base, &src_offset,
8209 &size, &maxsize);
8210 dest_base = destvar;
8211 if (handled_component_p (dest_base))
8212 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8213 &size, &maxsize);
8214 if (host_integerp (len, 1))
8216 maxsize = tree_low_cst (len, 1);
8217 if (maxsize
8218 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8219 maxsize = -1;
8220 else
8221 maxsize *= BITS_PER_UNIT;
8223 else
8224 maxsize = -1;
8225 if (SSA_VAR_P (src_base)
8226 && SSA_VAR_P (dest_base))
8228 if (operand_equal_p (src_base, dest_base, 0)
8229 && ranges_overlap_p (src_offset, maxsize,
8230 dest_offset, maxsize))
8231 return NULL_TREE;
8233 else if (TREE_CODE (src_base) == INDIRECT_REF
8234 && TREE_CODE (dest_base) == INDIRECT_REF)
8236 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8237 TREE_OPERAND (dest_base, 0), 0)
8238 || ranges_overlap_p (src_offset, maxsize,
8239 dest_offset, maxsize))
8240 return NULL_TREE;
8242 else
8243 return NULL_TREE;
8245 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8246 if (!fn)
8247 return NULL_TREE;
8248 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8250 return NULL_TREE;
8253 if (!host_integerp (len, 0))
8254 return NULL_TREE;
8255 /* FIXME:
8256 This logic lose for arguments like (type *)malloc (sizeof (type)),
8257 since we strip the casts of up to VOID return value from malloc.
8258 Perhaps we ought to inherit type from non-VOID argument here? */
8259 STRIP_NOPS (src);
8260 STRIP_NOPS (dest);
8261 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8262 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8264 tree tem = TREE_OPERAND (src, 0);
8265 STRIP_NOPS (tem);
8266 if (tem != TREE_OPERAND (src, 0))
8267 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8269 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8271 tree tem = TREE_OPERAND (dest, 0);
8272 STRIP_NOPS (tem);
8273 if (tem != TREE_OPERAND (dest, 0))
8274 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8276 srctype = TREE_TYPE (TREE_TYPE (src));
8277 if (srctype
8278 && TREE_CODE (srctype) == ARRAY_TYPE
8279 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8281 srctype = TREE_TYPE (srctype);
8282 STRIP_NOPS (src);
8283 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8285 desttype = TREE_TYPE (TREE_TYPE (dest));
8286 if (desttype
8287 && TREE_CODE (desttype) == ARRAY_TYPE
8288 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8290 desttype = TREE_TYPE (desttype);
8291 STRIP_NOPS (dest);
8292 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8294 if (!srctype || !desttype
8295 || !TYPE_SIZE_UNIT (srctype)
8296 || !TYPE_SIZE_UNIT (desttype)
8297 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8298 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8299 || TYPE_VOLATILE (srctype)
8300 || TYPE_VOLATILE (desttype))
8301 return NULL_TREE;
8303 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8304 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8305 if (dest_align < (int) TYPE_ALIGN (desttype)
8306 || src_align < (int) TYPE_ALIGN (srctype))
8307 return NULL_TREE;
8309 if (!ignore)
8310 dest = builtin_save_expr (dest);
8312 srcvar = NULL_TREE;
8313 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8315 srcvar = build_fold_indirect_ref_loc (loc, src);
8316 if (TREE_THIS_VOLATILE (srcvar))
8317 return NULL_TREE;
8318 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8319 srcvar = NULL_TREE;
8320 /* With memcpy, it is possible to bypass aliasing rules, so without
8321 this check i.e. execute/20060930-2.c would be misoptimized,
8322 because it use conflicting alias set to hold argument for the
8323 memcpy call. This check is probably unnecessary with
8324 -fno-strict-aliasing. Similarly for destvar. See also
8325 PR29286. */
8326 else if (!var_decl_component_p (srcvar))
8327 srcvar = NULL_TREE;
8330 destvar = NULL_TREE;
8331 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8333 destvar = build_fold_indirect_ref_loc (loc, dest);
8334 if (TREE_THIS_VOLATILE (destvar))
8335 return NULL_TREE;
8336 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8337 destvar = NULL_TREE;
8338 else if (!var_decl_component_p (destvar))
8339 destvar = NULL_TREE;
8342 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8343 return NULL_TREE;
8345 if (srcvar == NULL_TREE)
8347 tree srcptype;
8348 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8349 return NULL_TREE;
8351 srctype = build_qualified_type (desttype, 0);
8352 if (src_align < (int) TYPE_ALIGN (srctype))
8354 if (AGGREGATE_TYPE_P (srctype)
8355 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8356 return NULL_TREE;
8358 srctype = build_variant_type_copy (srctype);
8359 TYPE_ALIGN (srctype) = src_align;
8360 TYPE_USER_ALIGN (srctype) = 1;
8361 TYPE_PACKED (srctype) = 1;
8363 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8364 src = fold_convert_loc (loc, srcptype, src);
8365 srcvar = build_fold_indirect_ref_loc (loc, src);
8367 else if (destvar == NULL_TREE)
8369 tree destptype;
8370 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8371 return NULL_TREE;
8373 desttype = build_qualified_type (srctype, 0);
8374 if (dest_align < (int) TYPE_ALIGN (desttype))
8376 if (AGGREGATE_TYPE_P (desttype)
8377 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8378 return NULL_TREE;
8380 desttype = build_variant_type_copy (desttype);
8381 TYPE_ALIGN (desttype) = dest_align;
8382 TYPE_USER_ALIGN (desttype) = 1;
8383 TYPE_PACKED (desttype) = 1;
8385 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8386 dest = fold_convert_loc (loc, destptype, dest);
8387 destvar = build_fold_indirect_ref_loc (loc, dest);
8390 if (srctype == desttype
8391 || (gimple_in_ssa_p (cfun)
8392 && useless_type_conversion_p (desttype, srctype)))
8393 expr = srcvar;
8394 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8395 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8396 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8397 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8398 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8399 else
8400 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8401 TREE_TYPE (destvar), srcvar);
8402 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8405 if (ignore)
8406 return expr;
8408 if (endp == 0 || endp == 3)
8409 return omit_one_operand_loc (loc, type, dest, expr);
8411 if (expr == len)
8412 expr = NULL_TREE;
8414 if (endp == 2)
8415 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8416 ssize_int (1));
8418 len = fold_convert_loc (loc, sizetype, len);
8419 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8420 dest = fold_convert_loc (loc, type, dest);
8421 if (expr)
8422 dest = omit_one_operand_loc (loc, type, dest, expr);
8423 return dest;
8426 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8427 If LEN is not NULL, it represents the length of the string to be
8428 copied. Return NULL_TREE if no simplification can be made. */
8430 tree
8431 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8433 tree fn;
8435 if (!validate_arg (dest, POINTER_TYPE)
8436 || !validate_arg (src, POINTER_TYPE))
8437 return NULL_TREE;
8439 /* If SRC and DEST are the same (and not volatile), return DEST. */
8440 if (operand_equal_p (src, dest, 0))
8441 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8443 if (optimize_function_for_size_p (cfun))
8444 return NULL_TREE;
8446 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8447 if (!fn)
8448 return NULL_TREE;
8450 if (!len)
8452 len = c_strlen (src, 1);
8453 if (! len || TREE_SIDE_EFFECTS (len))
8454 return NULL_TREE;
8457 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8458 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8459 build_call_expr_loc (loc, fn, 3, dest, src, len));
8462 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8463 Return NULL_TREE if no simplification can be made. */
8465 static tree
8466 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8468 tree fn, len, lenp1, call, type;
8470 if (!validate_arg (dest, POINTER_TYPE)
8471 || !validate_arg (src, POINTER_TYPE))
8472 return NULL_TREE;
8474 len = c_strlen (src, 1);
8475 if (!len
8476 || TREE_CODE (len) != INTEGER_CST)
8477 return NULL_TREE;
8479 if (optimize_function_for_size_p (cfun)
8480 /* If length is zero it's small enough. */
8481 && !integer_zerop (len))
8482 return NULL_TREE;
8484 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8485 if (!fn)
8486 return NULL_TREE;
8488 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8489 /* We use dest twice in building our expression. Save it from
8490 multiple expansions. */
8491 dest = builtin_save_expr (dest);
8492 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8494 type = TREE_TYPE (TREE_TYPE (fndecl));
8495 len = fold_convert_loc (loc, sizetype, len);
8496 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8497 dest = fold_convert_loc (loc, type, dest);
8498 dest = omit_one_operand_loc (loc, type, dest, call);
8499 return dest;
8502 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8503 If SLEN is not NULL, it represents the length of the source string.
8504 Return NULL_TREE if no simplification can be made. */
8506 tree
8507 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8508 tree src, tree len, tree slen)
8510 tree fn;
8512 if (!validate_arg (dest, POINTER_TYPE)
8513 || !validate_arg (src, POINTER_TYPE)
8514 || !validate_arg (len, INTEGER_TYPE))
8515 return NULL_TREE;
8517 /* If the LEN parameter is zero, return DEST. */
8518 if (integer_zerop (len))
8519 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8521 /* We can't compare slen with len as constants below if len is not a
8522 constant. */
8523 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8524 return NULL_TREE;
8526 if (!slen)
8527 slen = c_strlen (src, 1);
8529 /* Now, we must be passed a constant src ptr parameter. */
8530 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8531 return NULL_TREE;
8533 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8535 /* We do not support simplification of this case, though we do
8536 support it when expanding trees into RTL. */
8537 /* FIXME: generate a call to __builtin_memset. */
8538 if (tree_int_cst_lt (slen, len))
8539 return NULL_TREE;
8541 /* OK transform into builtin memcpy. */
8542 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8543 if (!fn)
8544 return NULL_TREE;
8545 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8546 build_call_expr_loc (loc, fn, 3, dest, src, len));
8549 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8550 arguments to the call, and TYPE is its return type.
8551 Return NULL_TREE if no simplification can be made. */
8553 static tree
8554 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8556 if (!validate_arg (arg1, POINTER_TYPE)
8557 || !validate_arg (arg2, INTEGER_TYPE)
8558 || !validate_arg (len, INTEGER_TYPE))
8559 return NULL_TREE;
8560 else
8562 const char *p1;
8564 if (TREE_CODE (arg2) != INTEGER_CST
8565 || !host_integerp (len, 1))
8566 return NULL_TREE;
8568 p1 = c_getstr (arg1);
8569 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8571 char c;
8572 const char *r;
8573 tree tem;
8575 if (target_char_cast (arg2, &c))
8576 return NULL_TREE;
8578 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8580 if (r == NULL)
8581 return build_int_cst (TREE_TYPE (arg1), 0);
8583 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8584 size_int (r - p1));
8585 return fold_convert_loc (loc, type, tem);
8587 return NULL_TREE;
8591 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8592 Return NULL_TREE if no simplification can be made. */
8594 static tree
8595 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8597 const char *p1, *p2;
8599 if (!validate_arg (arg1, POINTER_TYPE)
8600 || !validate_arg (arg2, POINTER_TYPE)
8601 || !validate_arg (len, INTEGER_TYPE))
8602 return NULL_TREE;
8604 /* If the LEN parameter is zero, return zero. */
8605 if (integer_zerop (len))
8606 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8607 arg1, arg2);
8609 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8610 if (operand_equal_p (arg1, arg2, 0))
8611 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8613 p1 = c_getstr (arg1);
8614 p2 = c_getstr (arg2);
8616 /* If all arguments are constant, and the value of len is not greater
8617 than the lengths of arg1 and arg2, evaluate at compile-time. */
8618 if (host_integerp (len, 1) && p1 && p2
8619 && compare_tree_int (len, strlen (p1) + 1) <= 0
8620 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8622 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8624 if (r > 0)
8625 return integer_one_node;
8626 else if (r < 0)
8627 return integer_minus_one_node;
8628 else
8629 return integer_zero_node;
8632 /* If len parameter is one, return an expression corresponding to
8633 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8634 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8636 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8637 tree cst_uchar_ptr_node
8638 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8640 tree ind1
8641 = fold_convert_loc (loc, integer_type_node,
8642 build1 (INDIRECT_REF, cst_uchar_node,
8643 fold_convert_loc (loc,
8644 cst_uchar_ptr_node,
8645 arg1)));
8646 tree ind2
8647 = fold_convert_loc (loc, integer_type_node,
8648 build1 (INDIRECT_REF, cst_uchar_node,
8649 fold_convert_loc (loc,
8650 cst_uchar_ptr_node,
8651 arg2)));
8652 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8655 return NULL_TREE;
8658 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8659 Return NULL_TREE if no simplification can be made. */
8661 static tree
8662 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8664 const char *p1, *p2;
8666 if (!validate_arg (arg1, POINTER_TYPE)
8667 || !validate_arg (arg2, POINTER_TYPE))
8668 return NULL_TREE;
8670 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8671 if (operand_equal_p (arg1, arg2, 0))
8672 return integer_zero_node;
8674 p1 = c_getstr (arg1);
8675 p2 = c_getstr (arg2);
8677 if (p1 && p2)
8679 const int i = strcmp (p1, p2);
8680 if (i < 0)
8681 return integer_minus_one_node;
8682 else if (i > 0)
8683 return integer_one_node;
8684 else
8685 return integer_zero_node;
8688 /* If the second arg is "", return *(const unsigned char*)arg1. */
8689 if (p2 && *p2 == '\0')
8691 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8692 tree cst_uchar_ptr_node
8693 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8695 return fold_convert_loc (loc, integer_type_node,
8696 build1 (INDIRECT_REF, cst_uchar_node,
8697 fold_convert_loc (loc,
8698 cst_uchar_ptr_node,
8699 arg1)));
8702 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8703 if (p1 && *p1 == '\0')
8705 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8706 tree cst_uchar_ptr_node
8707 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8709 tree temp
8710 = fold_convert_loc (loc, integer_type_node,
8711 build1 (INDIRECT_REF, cst_uchar_node,
8712 fold_convert_loc (loc,
8713 cst_uchar_ptr_node,
8714 arg2)));
8715 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8718 return NULL_TREE;
8721 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8722 Return NULL_TREE if no simplification can be made. */
8724 static tree
8725 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8727 const char *p1, *p2;
8729 if (!validate_arg (arg1, POINTER_TYPE)
8730 || !validate_arg (arg2, POINTER_TYPE)
8731 || !validate_arg (len, INTEGER_TYPE))
8732 return NULL_TREE;
8734 /* If the LEN parameter is zero, return zero. */
8735 if (integer_zerop (len))
8736 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8737 arg1, arg2);
8739 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8740 if (operand_equal_p (arg1, arg2, 0))
8741 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8743 p1 = c_getstr (arg1);
8744 p2 = c_getstr (arg2);
8746 if (host_integerp (len, 1) && p1 && p2)
8748 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8749 if (i > 0)
8750 return integer_one_node;
8751 else if (i < 0)
8752 return integer_minus_one_node;
8753 else
8754 return integer_zero_node;
8757 /* If the second arg is "", and the length is greater than zero,
8758 return *(const unsigned char*)arg1. */
8759 if (p2 && *p2 == '\0'
8760 && TREE_CODE (len) == INTEGER_CST
8761 && tree_int_cst_sgn (len) == 1)
8763 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8764 tree cst_uchar_ptr_node
8765 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8767 return fold_convert_loc (loc, integer_type_node,
8768 build1 (INDIRECT_REF, cst_uchar_node,
8769 fold_convert_loc (loc,
8770 cst_uchar_ptr_node,
8771 arg1)));
8774 /* If the first arg is "", and the length is greater than zero,
8775 return -*(const unsigned char*)arg2. */
8776 if (p1 && *p1 == '\0'
8777 && TREE_CODE (len) == INTEGER_CST
8778 && tree_int_cst_sgn (len) == 1)
8780 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8781 tree cst_uchar_ptr_node
8782 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8784 tree temp = fold_convert_loc (loc, integer_type_node,
8785 build1 (INDIRECT_REF, cst_uchar_node,
8786 fold_convert_loc (loc,
8787 cst_uchar_ptr_node,
8788 arg2)));
8789 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8792 /* If len parameter is one, return an expression corresponding to
8793 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8794 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8796 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8797 tree cst_uchar_ptr_node
8798 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8800 tree ind1 = fold_convert_loc (loc, integer_type_node,
8801 build1 (INDIRECT_REF, cst_uchar_node,
8802 fold_convert_loc (loc,
8803 cst_uchar_ptr_node,
8804 arg1)));
8805 tree ind2 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8808 cst_uchar_ptr_node,
8809 arg2)));
8810 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8813 return NULL_TREE;
8816 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8817 ARG. Return NULL_TREE if no simplification can be made. */
8819 static tree
8820 fold_builtin_signbit (location_t loc, tree arg, tree type)
8822 tree temp;
8824 if (!validate_arg (arg, REAL_TYPE))
8825 return NULL_TREE;
8827 /* If ARG is a compile-time constant, determine the result. */
8828 if (TREE_CODE (arg) == REAL_CST
8829 && !TREE_OVERFLOW (arg))
8831 REAL_VALUE_TYPE c;
8833 c = TREE_REAL_CST (arg);
8834 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8835 return fold_convert_loc (loc, type, temp);
8838 /* If ARG is non-negative, the result is always zero. */
8839 if (tree_expr_nonnegative_p (arg))
8840 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8842 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8843 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8844 return fold_build2_loc (loc, LT_EXPR, type, arg,
8845 build_real (TREE_TYPE (arg), dconst0));
8847 return NULL_TREE;
8850 /* Fold function call to builtin copysign, copysignf or copysignl with
8851 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8852 be made. */
8854 static tree
8855 fold_builtin_copysign (location_t loc, tree fndecl,
8856 tree arg1, tree arg2, tree type)
8858 tree tem;
8860 if (!validate_arg (arg1, REAL_TYPE)
8861 || !validate_arg (arg2, REAL_TYPE))
8862 return NULL_TREE;
8864 /* copysign(X,X) is X. */
8865 if (operand_equal_p (arg1, arg2, 0))
8866 return fold_convert_loc (loc, type, arg1);
8868 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8869 if (TREE_CODE (arg1) == REAL_CST
8870 && TREE_CODE (arg2) == REAL_CST
8871 && !TREE_OVERFLOW (arg1)
8872 && !TREE_OVERFLOW (arg2))
8874 REAL_VALUE_TYPE c1, c2;
8876 c1 = TREE_REAL_CST (arg1);
8877 c2 = TREE_REAL_CST (arg2);
8878 /* c1.sign := c2.sign. */
8879 real_copysign (&c1, &c2);
8880 return build_real (type, c1);
8883 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8884 Remember to evaluate Y for side-effects. */
8885 if (tree_expr_nonnegative_p (arg2))
8886 return omit_one_operand_loc (loc, type,
8887 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8888 arg2);
8890 /* Strip sign changing operations for the first argument. */
8891 tem = fold_strip_sign_ops (arg1);
8892 if (tem)
8893 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8895 return NULL_TREE;
8898 /* Fold a call to builtin isascii with argument ARG. */
8900 static tree
8901 fold_builtin_isascii (location_t loc, tree arg)
8903 if (!validate_arg (arg, INTEGER_TYPE))
8904 return NULL_TREE;
8905 else
8907 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8908 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8909 build_int_cst (NULL_TREE,
8910 ~ (unsigned HOST_WIDE_INT) 0x7f));
8911 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8912 arg, integer_zero_node);
8916 /* Fold a call to builtin toascii with argument ARG. */
8918 static tree
8919 fold_builtin_toascii (location_t loc, tree arg)
8921 if (!validate_arg (arg, INTEGER_TYPE))
8922 return NULL_TREE;
8924 /* Transform toascii(c) -> (c & 0x7f). */
8925 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8926 build_int_cst (NULL_TREE, 0x7f));
8929 /* Fold a call to builtin isdigit with argument ARG. */
8931 static tree
8932 fold_builtin_isdigit (location_t loc, tree arg)
8934 if (!validate_arg (arg, INTEGER_TYPE))
8935 return NULL_TREE;
8936 else
8938 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8939 /* According to the C standard, isdigit is unaffected by locale.
8940 However, it definitely is affected by the target character set. */
8941 unsigned HOST_WIDE_INT target_digit0
8942 = lang_hooks.to_target_charset ('0');
8944 if (target_digit0 == 0)
8945 return NULL_TREE;
8947 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8948 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8949 build_int_cst (unsigned_type_node, target_digit0));
8950 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8951 build_int_cst (unsigned_type_node, 9));
8955 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8957 static tree
8958 fold_builtin_fabs (location_t loc, tree arg, tree type)
8960 if (!validate_arg (arg, REAL_TYPE))
8961 return NULL_TREE;
8963 arg = fold_convert_loc (loc, type, arg);
8964 if (TREE_CODE (arg) == REAL_CST)
8965 return fold_abs_const (arg, type);
8966 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8969 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8971 static tree
8972 fold_builtin_abs (location_t loc, tree arg, tree type)
8974 if (!validate_arg (arg, INTEGER_TYPE))
8975 return NULL_TREE;
8977 arg = fold_convert_loc (loc, type, arg);
8978 if (TREE_CODE (arg) == INTEGER_CST)
8979 return fold_abs_const (arg, type);
8980 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8983 /* Fold a call to builtin fmin or fmax. */
8985 static tree
8986 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8987 tree type, bool max)
8989 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8991 /* Calculate the result when the argument is a constant. */
8992 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8994 if (res)
8995 return res;
8997 /* If either argument is NaN, return the other one. Avoid the
8998 transformation if we get (and honor) a signalling NaN. Using
8999 omit_one_operand() ensures we create a non-lvalue. */
9000 if (TREE_CODE (arg0) == REAL_CST
9001 && real_isnan (&TREE_REAL_CST (arg0))
9002 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9003 || ! TREE_REAL_CST (arg0).signalling))
9004 return omit_one_operand_loc (loc, type, arg1, arg0);
9005 if (TREE_CODE (arg1) == REAL_CST
9006 && real_isnan (&TREE_REAL_CST (arg1))
9007 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9008 || ! TREE_REAL_CST (arg1).signalling))
9009 return omit_one_operand_loc (loc, type, arg0, arg1);
9011 /* Transform fmin/fmax(x,x) -> x. */
9012 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9013 return omit_one_operand_loc (loc, type, arg0, arg1);
9015 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9016 functions to return the numeric arg if the other one is NaN.
9017 These tree codes don't honor that, so only transform if
9018 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9019 handled, so we don't have to worry about it either. */
9020 if (flag_finite_math_only)
9021 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9022 fold_convert_loc (loc, type, arg0),
9023 fold_convert_loc (loc, type, arg1));
9025 return NULL_TREE;
9028 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9030 static tree
9031 fold_builtin_carg (location_t loc, tree arg, tree type)
9033 if (validate_arg (arg, COMPLEX_TYPE)
9034 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9036 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9038 if (atan2_fn)
9040 tree new_arg = builtin_save_expr (arg);
9041 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9042 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9043 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9047 return NULL_TREE;
9050 /* Fold a call to builtin logb/ilogb. */
9052 static tree
9053 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9055 if (! validate_arg (arg, REAL_TYPE))
9056 return NULL_TREE;
9058 STRIP_NOPS (arg);
9060 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9062 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9064 switch (value->cl)
9066 case rvc_nan:
9067 case rvc_inf:
9068 /* If arg is Inf or NaN and we're logb, return it. */
9069 if (TREE_CODE (rettype) == REAL_TYPE)
9070 return fold_convert_loc (loc, rettype, arg);
9071 /* Fall through... */
9072 case rvc_zero:
9073 /* Zero may set errno and/or raise an exception for logb, also
9074 for ilogb we don't know FP_ILOGB0. */
9075 return NULL_TREE;
9076 case rvc_normal:
9077 /* For normal numbers, proceed iff radix == 2. In GCC,
9078 normalized significands are in the range [0.5, 1.0). We
9079 want the exponent as if they were [1.0, 2.0) so get the
9080 exponent and subtract 1. */
9081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9082 return fold_convert_loc (loc, rettype,
9083 build_int_cst (NULL_TREE,
9084 REAL_EXP (value)-1));
9085 break;
9089 return NULL_TREE;
9092 /* Fold a call to builtin significand, if radix == 2. */
9094 static tree
9095 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9097 if (! validate_arg (arg, REAL_TYPE))
9098 return NULL_TREE;
9100 STRIP_NOPS (arg);
9102 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9104 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9106 switch (value->cl)
9108 case rvc_zero:
9109 case rvc_nan:
9110 case rvc_inf:
9111 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9112 return fold_convert_loc (loc, rettype, arg);
9113 case rvc_normal:
9114 /* For normal numbers, proceed iff radix == 2. */
9115 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9117 REAL_VALUE_TYPE result = *value;
9118 /* In GCC, normalized significands are in the range [0.5,
9119 1.0). We want them to be [1.0, 2.0) so set the
9120 exponent to 1. */
9121 SET_REAL_EXP (&result, 1);
9122 return build_real (rettype, result);
9124 break;
9128 return NULL_TREE;
9131 /* Fold a call to builtin frexp, we can assume the base is 2. */
9133 static tree
9134 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9136 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9137 return NULL_TREE;
9139 STRIP_NOPS (arg0);
9141 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9142 return NULL_TREE;
9144 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9146 /* Proceed if a valid pointer type was passed in. */
9147 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9149 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9150 tree frac, exp;
9152 switch (value->cl)
9154 case rvc_zero:
9155 /* For +-0, return (*exp = 0, +-0). */
9156 exp = integer_zero_node;
9157 frac = arg0;
9158 break;
9159 case rvc_nan:
9160 case rvc_inf:
9161 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9162 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9163 case rvc_normal:
9165 /* Since the frexp function always expects base 2, and in
9166 GCC normalized significands are already in the range
9167 [0.5, 1.0), we have exactly what frexp wants. */
9168 REAL_VALUE_TYPE frac_rvt = *value;
9169 SET_REAL_EXP (&frac_rvt, 0);
9170 frac = build_real (rettype, frac_rvt);
9171 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9173 break;
9174 default:
9175 gcc_unreachable ();
9178 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9179 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9180 TREE_SIDE_EFFECTS (arg1) = 1;
9181 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9184 return NULL_TREE;
9187 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9188 then we can assume the base is two. If it's false, then we have to
9189 check the mode of the TYPE parameter in certain cases. */
/* Returns ARG0 unchanged for trivial cases, a constant-folded result when
   both arguments are compile-time constants and the result is exactly
   representable in TYPE's mode, or NULL_TREE if no folding was possible.  */
9191 static tree
9192 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9193 tree type, bool ldexp)
9195 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9197 STRIP_NOPS (arg0);
9198 STRIP_NOPS (arg1);
/* ldexp(x, 0) == x and ldexp(0/Inf/NaN, n) == first arg, so the call
   folds to ARG0; omit_one_operand_loc keeps ARG1's side effects.  */
9200 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9201 if (real_zerop (arg0) || integer_zerop (arg1)
9202 || (TREE_CODE (arg0) == REAL_CST
9203 && !real_isfinite (&TREE_REAL_CST (arg0))))
9204 return omit_one_operand_loc (loc, type, arg0, arg1);
/* For scalbn/scalbln (LDEXP false) constant evaluation is only valid
   when the target format's radix is 2, since real_ldexp scales by
   powers of two.  */
9206 /* If both arguments are constant, then try to evaluate it. */
9207 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9208 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9209 && host_integerp (arg1, 0))
9211 /* Bound the maximum adjustment to twice the range of the
9212 mode's valid exponents. Use abs to ensure the range is
9213 positive as a sanity check. */
9214 const long max_exp_adj = 2 *
9215 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9216 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9218 /* Get the user-requested adjustment. */
9219 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9221 /* The requested adjustment must be inside this range. This
9222 is a preliminary cap to avoid things like overflow, we
9223 may still fail to compute the result for other reasons. */
9224 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9226 REAL_VALUE_TYPE initial_result;
9228 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9230 /* Ensure we didn't overflow. */
9231 if (! real_isinf (&initial_result))
9233 const REAL_VALUE_TYPE trunc_result
9234 = real_value_truncate (TYPE_MODE (type), initial_result);
/* If truncating to TYPE's mode changed the value, the exact result
   does not fit; give up rather than fold to a rounded value.  */
9236 /* Only proceed if the target mode can hold the
9237 resulting value. */
9238 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9239 return build_real (type, trunc_result);
9245 return NULL_TREE;
9248 /* Fold a call to builtin modf. */
/* Folds modf (CST, &var) — a constant first argument only — into the
   expression (*arg1 = trunc(CST), CST - trunc(CST)) built as a
   COMPOUND_EXPR, so both the store and the returned fractional part are
   represented in trees.  Returns NULL_TREE when ARG0 is not a clean
   REAL_CST or the pointed-to type does not match RETTYPE.  */
9250 static tree
9251 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9253 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9254 return NULL_TREE;
9256 STRIP_NOPS (arg0);
9258 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9259 return NULL_TREE;
/* Dereference the output pointer so we can type-check and assign to it.  */
9261 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9263 /* Proceed if a valid pointer type was passed in. */
9264 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9266 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9267 REAL_VALUE_TYPE trunc, frac;
9269 switch (value->cl)
9271 case rvc_nan:
9272 case rvc_zero:
9273 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9274 trunc = frac = *value;
9275 break;
9276 case rvc_inf:
9277 /* For +-Inf, return (*arg1 = arg0, +-0). */
9278 frac = dconst0;
9279 frac.sign = value->sign;
9280 trunc = *value;
9281 break;
9282 case rvc_normal:
9283 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9284 real_trunc (&trunc, VOIDmode, value);
9285 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9286 /* If the original number was negative and already
9287 integral, then the fractional part is -0.0. */
9288 if (value->sign && frac.cl == rvc_zero)
9289 frac.sign = value->sign;
9290 break;
/* TREE_SIDE_EFFECTS must be set explicitly so the assignment is not
   discarded when the COMPOUND_EXPR's value is simplified.  */
9293 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9294 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9295 build_real (rettype, trunc));
9296 TREE_SIDE_EFFECTS (arg1) = 1;
9297 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9298 build_real (rettype, frac));
9301 return NULL_TREE;
9304 /* Given a location LOC, an interclass builtin function decl FNDECL
9305 and its single argument ARG, return an folded expression computing
9306 the same, or NULL_TREE if we either couldn't or didn't want to fold
9307 (the latter happen if there's an RTL instruction available). */
9309 static tree
9310 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9312 enum machine_mode mode;
9314 if (!validate_arg (arg, REAL_TYPE))
9315 return NULL_TREE;
/* If the target has a direct RTL pattern for this classification,
   prefer expanding to that instruction instead of folding here.  */
9317 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9318 return NULL_TREE;
9320 mode = TYPE_MODE (TREE_TYPE (arg));
9322 /* If there is no optab, try generic code. */
9323 switch (DECL_FUNCTION_CODE (fndecl))
9325 tree result;
9327 CASE_FLT_FN (BUILT_IN_ISINF):
9329 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9330 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9331 tree const type = TREE_TYPE (arg);
9332 REAL_VALUE_TYPE r;
9333 char buf[128];
/* get_max_float writes the mode's largest finite value as a hex
   float string into BUF; parse it back into R.  */
9335 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9336 real_from_string (&r, buf);
9337 result = build_call_expr (isgr_fn, 2,
9338 fold_build1_loc (loc, ABS_EXPR, type, arg),
9339 build_real (type, r));
9340 return result;
9342 CASE_FLT_FN (BUILT_IN_FINITE):
9343 case BUILT_IN_ISFINITE:
9345 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9346 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9347 tree const type = TREE_TYPE (arg);
9348 REAL_VALUE_TYPE r;
9349 char buf[128];
9351 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9352 real_from_string (&r, buf);
9353 result = build_call_expr (isle_fn, 2,
9354 fold_build1_loc (loc, ABS_EXPR, type, arg),
9355 build_real (type, r));
9356 /*result = fold_build2_loc (loc, UNGT_EXPR,
9357 TREE_TYPE (TREE_TYPE (fndecl)),
9358 fold_build1_loc (loc, ABS_EXPR, type, arg),
9359 build_real (type, r));
9360 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9361 TREE_TYPE (TREE_TYPE (fndecl)),
9362 result);*/
9363 return result;
9365 case BUILT_IN_ISNORMAL:
9367 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9368 islessequal(fabs(x),DBL_MAX). */
9369 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9370 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9371 tree const type = TREE_TYPE (arg);
9372 REAL_VALUE_TYPE rmax, rmin;
9373 char buf[128];
/* "0x1p<emin-1>" is the smallest normalized value of MODE; anything
   with smaller magnitude is subnormal or zero.  */
9375 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9376 real_from_string (&rmax, buf);
9377 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9378 real_from_string (&rmin, buf);
/* ARG is used twice below; builtin_save_expr evaluates it once.  */
9379 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9380 result = build_call_expr (isle_fn, 2, arg,
9381 build_real (type, rmax));
9382 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9383 build_call_expr (isge_fn, 2, arg,
9384 build_real (type, rmin)));
9385 return result;
9387 default:
9388 break;
9391 return NULL_TREE;
9394 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9395 ARG is the argument for the call. */
/* BUILTIN_INDEX selects which classification to fold.  Constant
   arguments fold to integer constants; for non-constants only the
   cases that can be expressed with generic trees are handled (e.g.
   isnan(x) -> x unord x), the rest return NULL_TREE.  */
9397 static tree
9398 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9400 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9401 REAL_VALUE_TYPE r;
9403 if (!validate_arg (arg, REAL_TYPE))
9404 return NULL_TREE;
9406 switch (builtin_index)
9408 case BUILT_IN_ISINF:
/* If the mode has no infinities, the answer is statically 0; keep
   ARG's side effects via omit_one_operand_loc.  */
9409 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9410 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9412 if (TREE_CODE (arg) == REAL_CST)
9414 r = TREE_REAL_CST (arg);
/* Constant +Inf folds to 1, -Inf to -1, any finite value to 0.  */
9415 if (real_isinf (&r))
9416 return real_compare (GT_EXPR, &r, &dconst0)
9417 ? integer_one_node : integer_minus_one_node;
9418 else
9419 return integer_zero_node;
9422 return NULL_TREE;
9424 case BUILT_IN_ISINF_SIGN:
9426 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9427 /* In a boolean context, GCC will fold the inner COND_EXPR to
9428 1. So e.g. "if (isinf_sign(x))" would be folded to just
9429 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9430 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9431 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9432 tree tmp = NULL_TREE;
/* ARG is referenced by both calls; evaluate it only once.  */
9434 arg = builtin_save_expr (arg);
9436 if (signbit_fn && isinf_fn)
9438 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9439 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
/* Normalize both calls to 0/1 booleans before building the nested
   COND_EXPRs, since their raw return values are only specified as
   zero/nonzero.  */
9441 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9442 signbit_call, integer_zero_node);
9443 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9444 isinf_call, integer_zero_node);
9446 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9447 integer_minus_one_node, integer_one_node);
9448 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9449 isinf_call, tmp,
9450 integer_zero_node);
9453 return tmp;
9456 case BUILT_IN_ISFINITE:
/* Without NaNs and infinities every value is finite: fold to 1.  */
9457 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9458 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9459 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9461 if (TREE_CODE (arg) == REAL_CST)
9463 r = TREE_REAL_CST (arg);
9464 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9467 return NULL_TREE;
9469 case BUILT_IN_ISNAN:
9470 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9471 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9473 if (TREE_CODE (arg) == REAL_CST)
9475 r = TREE_REAL_CST (arg);
9476 return real_isnan (&r) ? integer_one_node : integer_zero_node;
/* isnan(x) is equivalent to the unordered self-comparison x unord x.  */
9479 arg = builtin_save_expr (arg);
9480 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9482 default:
9483 gcc_unreachable ();
9487 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9488 This builtin will generate code to return the appropriate floating
9489 point classification depending on the value of the floating point
9490 number passed in. The possible return values must be supplied as
9491 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9492 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9493 one floating point argument which is "type generic". */
9495 static tree
9496 fold_builtin_fpclassify (location_t loc, tree exp)
9498 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9499 arg, type, res, tmp;
9500 enum machine_mode mode;
9501 REAL_VALUE_TYPE r;
9502 char buf[128];
9504 /* Verify the required arguments in the original call. */
9505 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9506 INTEGER_TYPE, INTEGER_TYPE,
9507 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9508 return NULL_TREE;
9510 fp_nan = CALL_EXPR_ARG (exp, 0);
9511 fp_infinite = CALL_EXPR_ARG (exp, 1);
9512 fp_normal = CALL_EXPR_ARG (exp, 2);
9513 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9514 fp_zero = CALL_EXPR_ARG (exp, 4);
9515 arg = CALL_EXPR_ARG (exp, 5);
9516 type = TREE_TYPE (arg);
9517 mode = TYPE_MODE (type);
/* Work on fabs(arg), evaluated once, so every comparison below can
   ignore the sign.  */
9518 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9520 /* fpclassify(x) ->
9521 isnan(x) ? FP_NAN :
9522 (fabs(x) == Inf ? FP_INFINITE :
9523 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9524 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
/* Build the COND_EXPR chain inside-out, starting from the innermost
   zero/subnormal test and wrapping each outer test around RES.  */
9526 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9527 build_real (type, dconst0));
9528 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9529 tmp, fp_zero, fp_subnormal);
/* "0x1p<emin-1>" is the smallest normalized value of MODE.  */
9531 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9532 real_from_string (&r, buf);
9533 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9534 arg, build_real (type, r));
9535 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
/* The Inf and NaN tests are only emitted when the mode honors those
   values, keeping the expression minimal for -ffinite-math-only.  */
9537 if (HONOR_INFINITIES (mode))
9539 real_inf (&r);
9540 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9541 build_real (type, r));
9542 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9543 fp_infinite, res);
9546 if (HONOR_NANS (mode))
9548 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9549 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9552 return res;
9555 /* Fold a call to an unordered comparison function such as
9556 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9557 being called and ARG0 and ARG1 are the arguments for the call.
9558 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9559 the opposite of the desired result. UNORDERED_CODE is used
9560 for modes that can hold NaNs and ORDERED_CODE is used for
9561 the rest. */
9563 static tree
9564 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9565 enum tree_code unordered_code,
9566 enum tree_code ordered_code)
9568 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9569 enum tree_code code;
9570 tree type0, type1;
9571 enum tree_code code0, code1;
9572 tree cmp_type = NULL_TREE;
9574 type0 = TREE_TYPE (arg0);
9575 type1 = TREE_TYPE (arg1);
9577 code0 = TREE_CODE (type0);
9578 code1 = TREE_CODE (type1);
/* Pick the common comparison type: the wider real type when both are
   real, otherwise the real operand's type when mixed with an integer.
   NOTE(review): if neither operand is REAL_TYPE, cmp_type stays
   NULL_TREE for the conversions below — presumably the front end
   guarantees at least one real operand here; verify against callers.  */
9580 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9581 /* Choose the wider of two real types. */
9582 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9583 ? type0 : type1;
9584 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9585 cmp_type = type0;
9586 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9587 cmp_type = type1;
9589 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9590 arg1 = fold_convert_loc (loc, cmp_type, arg1);
/* isunordered is emitted directly (or folded to 0 when NaNs are not
   honored); the other builtins are built as the negation of the
   opposite comparison, per the codes passed in.  */
9592 if (unordered_code == UNORDERED_EXPR)
9594 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9595 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9596 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9599 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9600 : ordered_code;
9601 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9602 fold_build2_loc (loc, code, type, arg0, arg1));
9605 /* Fold a call to built-in function FNDECL with 0 arguments.
9606 IGNORE is true if the result of the function call is ignored. This
9607 function returns NULL_TREE if no simplification was possible. */
9609 static tree
9610 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9612 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9613 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9614 switch (fcode)
/* __builtin_inf* folds to an Inf constant; the bool argument to
   fold_builtin_inf distinguishes inf (true) from huge_val (false).  */
9616 CASE_FLT_FN (BUILT_IN_INF):
9617 case BUILT_IN_INFD32:
9618 case BUILT_IN_INFD64:
9619 case BUILT_IN_INFD128:
9620 return fold_builtin_inf (loc, type, true);
9622 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9623 return fold_builtin_inf (loc, type, false);
/* classify_type with no argument classifies "no type" (NULL_TREE).  */
9625 case BUILT_IN_CLASSIFY_TYPE:
9626 return fold_builtin_classify_type (NULL_TREE);
9628 default:
9629 break;
9631 return NULL_TREE;
9634 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9635 IGNORE is true if the result of the function call is ignored. This
9636 function returns NULL_TREE if no simplification was possible. */
/* Dispatch table: each case either delegates to a dedicated
   fold_builtin_* helper or, for math builtins with constant arguments,
   evaluates the function at compile time via MPFR/MPC (do_mpfr_arg1 /
   do_mpc_arg1), with domain bounds passed where the function is only
   defined on part of the real line.  */
9638 static tree
9639 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9641 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9642 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9643 switch (fcode)
9646 case BUILT_IN_CONSTANT_P:
9648 tree val = fold_builtin_constant_p (arg0);
9650 /* Gimplification will pull the CALL_EXPR for the builtin out of
9651 an if condition. When not optimizing, we'll not CSE it back.
9652 To avoid link error types of regressions, return false now. */
9653 if (!val && !optimize)
9654 val = integer_zero_node;
9656 return val;
9659 case BUILT_IN_CLASSIFY_TYPE:
9660 return fold_builtin_classify_type (arg0);
9662 case BUILT_IN_STRLEN:
9663 return fold_builtin_strlen (loc, type, arg0);
9665 CASE_FLT_FN (BUILT_IN_FABS):
9666 return fold_builtin_fabs (loc, arg0, type);
9668 case BUILT_IN_ABS:
9669 case BUILT_IN_LABS:
9670 case BUILT_IN_LLABS:
9671 case BUILT_IN_IMAXABS:
9672 return fold_builtin_abs (loc, arg0, type);
/* Complex-argument builtins: validate_arg plus the element-type check
   guard against bogus user redeclarations before folding.  */
9674 CASE_FLT_FN (BUILT_IN_CONJ):
9675 if (validate_arg (arg0, COMPLEX_TYPE)
9676 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9677 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9678 break;
9680 CASE_FLT_FN (BUILT_IN_CREAL):
9681 if (validate_arg (arg0, COMPLEX_TYPE)
9682 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9683 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9684 break;
9686 CASE_FLT_FN (BUILT_IN_CIMAG):
9687 if (validate_arg (arg0, COMPLEX_TYPE)
9688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9689 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9690 break;
9692 CASE_FLT_FN (BUILT_IN_CCOS):
9693 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9695 CASE_FLT_FN (BUILT_IN_CCOSH):
9696 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9698 CASE_FLT_FN (BUILT_IN_CSIN):
9699 if (validate_arg (arg0, COMPLEX_TYPE)
9700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9701 return do_mpc_arg1 (arg0, type, mpc_sin);
9702 break;
9704 CASE_FLT_FN (BUILT_IN_CSINH):
9705 if (validate_arg (arg0, COMPLEX_TYPE)
9706 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9707 return do_mpc_arg1 (arg0, type, mpc_sinh);
9708 break;
9710 CASE_FLT_FN (BUILT_IN_CTAN):
9711 if (validate_arg (arg0, COMPLEX_TYPE)
9712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9713 return do_mpc_arg1 (arg0, type, mpc_tan);
9714 break;
9716 CASE_FLT_FN (BUILT_IN_CTANH):
9717 if (validate_arg (arg0, COMPLEX_TYPE)
9718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9719 return do_mpc_arg1 (arg0, type, mpc_tanh);
9720 break;
9722 CASE_FLT_FN (BUILT_IN_CLOG):
9723 if (validate_arg (arg0, COMPLEX_TYPE)
9724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9725 return do_mpc_arg1 (arg0, type, mpc_log);
9726 break;
9728 CASE_FLT_FN (BUILT_IN_CSQRT):
9729 if (validate_arg (arg0, COMPLEX_TYPE)
9730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9731 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9732 break;
9734 CASE_FLT_FN (BUILT_IN_CASIN):
9735 if (validate_arg (arg0, COMPLEX_TYPE)
9736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9737 return do_mpc_arg1 (arg0, type, mpc_asin);
9738 break;
9740 CASE_FLT_FN (BUILT_IN_CACOS):
9741 if (validate_arg (arg0, COMPLEX_TYPE)
9742 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9743 return do_mpc_arg1 (arg0, type, mpc_acos);
9744 break;
9746 CASE_FLT_FN (BUILT_IN_CATAN):
9747 if (validate_arg (arg0, COMPLEX_TYPE)
9748 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9749 return do_mpc_arg1 (arg0, type, mpc_atan);
9750 break;
9752 CASE_FLT_FN (BUILT_IN_CASINH):
9753 if (validate_arg (arg0, COMPLEX_TYPE)
9754 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9755 return do_mpc_arg1 (arg0, type, mpc_asinh);
9756 break;
9758 CASE_FLT_FN (BUILT_IN_CACOSH):
9759 if (validate_arg (arg0, COMPLEX_TYPE)
9760 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9761 return do_mpc_arg1 (arg0, type, mpc_acosh);
9762 break;
9764 CASE_FLT_FN (BUILT_IN_CATANH):
9765 if (validate_arg (arg0, COMPLEX_TYPE)
9766 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9767 return do_mpc_arg1 (arg0, type, mpc_atanh);
9768 break;
9770 CASE_FLT_FN (BUILT_IN_CABS):
9771 return fold_builtin_cabs (loc, arg0, type, fndecl);
9773 CASE_FLT_FN (BUILT_IN_CARG):
9774 return fold_builtin_carg (loc, arg0, type);
9776 CASE_FLT_FN (BUILT_IN_SQRT):
9777 return fold_builtin_sqrt (loc, arg0, type);
9779 CASE_FLT_FN (BUILT_IN_CBRT):
9780 return fold_builtin_cbrt (loc, arg0, type);
/* For the MPFR-evaluated functions the two REAL_VALUE_TYPE pointers
   give the lower/upper domain bounds (NULL = unbounded) and the final
   bool says whether the bounds are inclusive.  */
9782 CASE_FLT_FN (BUILT_IN_ASIN):
9783 if (validate_arg (arg0, REAL_TYPE))
9784 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9785 &dconstm1, &dconst1, true);
9786 break;
9788 CASE_FLT_FN (BUILT_IN_ACOS):
9789 if (validate_arg (arg0, REAL_TYPE))
9790 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9791 &dconstm1, &dconst1, true);
9792 break;
9794 CASE_FLT_FN (BUILT_IN_ATAN):
9795 if (validate_arg (arg0, REAL_TYPE))
9796 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9797 break;
9799 CASE_FLT_FN (BUILT_IN_ASINH):
9800 if (validate_arg (arg0, REAL_TYPE))
9801 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9802 break;
9804 CASE_FLT_FN (BUILT_IN_ACOSH):
9805 if (validate_arg (arg0, REAL_TYPE))
9806 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9807 &dconst1, NULL, true);
9808 break;
9810 CASE_FLT_FN (BUILT_IN_ATANH):
9811 if (validate_arg (arg0, REAL_TYPE))
9812 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9813 &dconstm1, &dconst1, false);
9814 break;
9816 CASE_FLT_FN (BUILT_IN_SIN):
9817 if (validate_arg (arg0, REAL_TYPE))
9818 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9819 break;
9821 CASE_FLT_FN (BUILT_IN_COS):
9822 return fold_builtin_cos (loc, arg0, type, fndecl);
9824 CASE_FLT_FN (BUILT_IN_TAN):
9825 return fold_builtin_tan (arg0, type);
9827 CASE_FLT_FN (BUILT_IN_CEXP):
9828 return fold_builtin_cexp (loc, arg0, type);
9830 CASE_FLT_FN (BUILT_IN_CEXPI):
9831 if (validate_arg (arg0, REAL_TYPE))
9832 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9833 break;
9835 CASE_FLT_FN (BUILT_IN_SINH):
9836 if (validate_arg (arg0, REAL_TYPE))
9837 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9838 break;
9840 CASE_FLT_FN (BUILT_IN_COSH):
9841 return fold_builtin_cosh (loc, arg0, type, fndecl);
9843 CASE_FLT_FN (BUILT_IN_TANH):
9844 if (validate_arg (arg0, REAL_TYPE))
9845 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9846 break;
9848 CASE_FLT_FN (BUILT_IN_ERF):
9849 if (validate_arg (arg0, REAL_TYPE))
9850 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9851 break;
9853 CASE_FLT_FN (BUILT_IN_ERFC):
9854 if (validate_arg (arg0, REAL_TYPE))
9855 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9856 break;
9858 CASE_FLT_FN (BUILT_IN_TGAMMA):
9859 if (validate_arg (arg0, REAL_TYPE))
9860 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9861 break;
9863 CASE_FLT_FN (BUILT_IN_EXP):
9864 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9866 CASE_FLT_FN (BUILT_IN_EXP2):
9867 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9869 CASE_FLT_FN (BUILT_IN_EXP10):
9870 CASE_FLT_FN (BUILT_IN_POW10):
9871 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9873 CASE_FLT_FN (BUILT_IN_EXPM1):
9874 if (validate_arg (arg0, REAL_TYPE))
9875 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9876 break;
9878 CASE_FLT_FN (BUILT_IN_LOG):
9879 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9881 CASE_FLT_FN (BUILT_IN_LOG2):
9882 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9884 CASE_FLT_FN (BUILT_IN_LOG10):
9885 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9887 CASE_FLT_FN (BUILT_IN_LOG1P):
9888 if (validate_arg (arg0, REAL_TYPE))
9889 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9890 &dconstm1, NULL, false);
9891 break;
9893 CASE_FLT_FN (BUILT_IN_J0):
9894 if (validate_arg (arg0, REAL_TYPE))
9895 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9896 NULL, NULL, 0);
9897 break;
9899 CASE_FLT_FN (BUILT_IN_J1):
9900 if (validate_arg (arg0, REAL_TYPE))
9901 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9902 NULL, NULL, 0);
9903 break;
9905 CASE_FLT_FN (BUILT_IN_Y0):
9906 if (validate_arg (arg0, REAL_TYPE))
9907 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9908 &dconst0, NULL, false);
9909 break;
9911 CASE_FLT_FN (BUILT_IN_Y1):
9912 if (validate_arg (arg0, REAL_TYPE))
9913 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9914 &dconst0, NULL, false);
9915 break;
9917 CASE_FLT_FN (BUILT_IN_NAN):
9918 case BUILT_IN_NAND32:
9919 case BUILT_IN_NAND64:
9920 case BUILT_IN_NAND128:
9921 return fold_builtin_nan (arg0, type, true);
9923 CASE_FLT_FN (BUILT_IN_NANS):
9924 return fold_builtin_nan (arg0, type, false);
9926 CASE_FLT_FN (BUILT_IN_FLOOR):
9927 return fold_builtin_floor (loc, fndecl, arg0);
9929 CASE_FLT_FN (BUILT_IN_CEIL):
9930 return fold_builtin_ceil (loc, fndecl, arg0);
9932 CASE_FLT_FN (BUILT_IN_TRUNC):
9933 return fold_builtin_trunc (loc, fndecl, arg0);
9935 CASE_FLT_FN (BUILT_IN_ROUND):
9936 return fold_builtin_round (loc, fndecl, arg0);
9938 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9939 CASE_FLT_FN (BUILT_IN_RINT):
9940 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9942 CASE_FLT_FN (BUILT_IN_LCEIL):
9943 CASE_FLT_FN (BUILT_IN_LLCEIL):
9944 CASE_FLT_FN (BUILT_IN_LFLOOR):
9945 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9946 CASE_FLT_FN (BUILT_IN_LROUND):
9947 CASE_FLT_FN (BUILT_IN_LLROUND):
9948 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9950 CASE_FLT_FN (BUILT_IN_LRINT):
9951 CASE_FLT_FN (BUILT_IN_LLRINT):
9952 return fold_fixed_mathfn (loc, fndecl, arg0);
9954 case BUILT_IN_BSWAP32:
9955 case BUILT_IN_BSWAP64:
9956 return fold_builtin_bswap (fndecl, arg0);
9958 CASE_INT_FN (BUILT_IN_FFS):
9959 CASE_INT_FN (BUILT_IN_CLZ):
9960 CASE_INT_FN (BUILT_IN_CTZ):
9961 CASE_INT_FN (BUILT_IN_POPCOUNT):
9962 CASE_INT_FN (BUILT_IN_PARITY):
9963 return fold_builtin_bitop (fndecl, arg0);
9965 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9966 return fold_builtin_signbit (loc, arg0, type);
9968 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9969 return fold_builtin_significand (loc, arg0, type);
9971 CASE_FLT_FN (BUILT_IN_ILOGB):
9972 CASE_FLT_FN (BUILT_IN_LOGB):
9973 return fold_builtin_logb (loc, arg0, type);
9975 case BUILT_IN_ISASCII:
9976 return fold_builtin_isascii (loc, arg0);
9978 case BUILT_IN_TOASCII:
9979 return fold_builtin_toascii (loc, arg0);
9981 case BUILT_IN_ISDIGIT:
9982 return fold_builtin_isdigit (loc, arg0);
/* Classification builtins: first try the generic classify folder; if
   it declines, fall back to the interclass (compare-against-limits)
   expansion.  */
9984 CASE_FLT_FN (BUILT_IN_FINITE):
9985 case BUILT_IN_FINITED32:
9986 case BUILT_IN_FINITED64:
9987 case BUILT_IN_FINITED128:
9988 case BUILT_IN_ISFINITE:
9990 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9991 if (ret)
9992 return ret;
9993 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9996 CASE_FLT_FN (BUILT_IN_ISINF):
9997 case BUILT_IN_ISINFD32:
9998 case BUILT_IN_ISINFD64:
9999 case BUILT_IN_ISINFD128:
10001 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10002 if (ret)
10003 return ret;
10004 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10007 case BUILT_IN_ISNORMAL:
10008 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10010 case BUILT_IN_ISINF_SIGN:
10011 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10013 CASE_FLT_FN (BUILT_IN_ISNAN):
10014 case BUILT_IN_ISNAND32:
10015 case BUILT_IN_ISNAND64:
10016 case BUILT_IN_ISNAND128:
10017 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10019 case BUILT_IN_PRINTF:
10020 case BUILT_IN_PRINTF_UNLOCKED:
10021 case BUILT_IN_VPRINTF:
10022 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10024 default:
10025 break;
10028 return NULL_TREE;
10032 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10033 IGNORE is true if the result of the function call is ignored. This
10034 function returns NULL_TREE if no simplification was possible. */
/* Dispatch switch mirroring fold_builtin_1: math builtins with constant
   arguments are evaluated via MPFR/MPC helpers, string/memory builtins
   are delegated to their dedicated fold_builtin_* routines.  */
10036 static tree
10037 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10039 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10040 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10042 switch (fcode)
10044 CASE_FLT_FN (BUILT_IN_JN):
10045 if (validate_arg (arg0, INTEGER_TYPE)
10046 && validate_arg (arg1, REAL_TYPE))
10047 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10048 break;
10050 CASE_FLT_FN (BUILT_IN_YN):
10051 if (validate_arg (arg0, INTEGER_TYPE)
10052 && validate_arg (arg1, REAL_TYPE))
10053 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10054 &dconst0, false)
10055 break;
10057 CASE_FLT_FN (BUILT_IN_DREM):
10058 CASE_FLT_FN (BUILT_IN_REMAINDER):
10059 if (validate_arg (arg0, REAL_TYPE)
10060 && validate_arg(arg1, REAL_TYPE))
10061 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10062 break;
10064 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10065 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10066 if (validate_arg (arg0, REAL_TYPE)
10067 && validate_arg(arg1, POINTER_TYPE))
10068 return do_mpfr_lgamma_r (arg0, arg1, type);
10069 break;
10071 CASE_FLT_FN (BUILT_IN_ATAN2):
10072 if (validate_arg (arg0, REAL_TYPE)
10073 && validate_arg(arg1, REAL_TYPE))
10074 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10075 break;
10077 CASE_FLT_FN (BUILT_IN_FDIM):
10078 if (validate_arg (arg0, REAL_TYPE)
10079 && validate_arg(arg1, REAL_TYPE))
10080 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10081 break;
10083 CASE_FLT_FN (BUILT_IN_HYPOT):
10084 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10086 CASE_FLT_FN (BUILT_IN_CPOW):
10087 if (validate_arg (arg0, COMPLEX_TYPE)
10088 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10089 && validate_arg (arg1, COMPLEX_TYPE)
10090 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10091 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10092 break;
/* ldexp always scales by powers of two; scalbn/scalbln scale by the
   radix of the type, hence the differing /*ldexp=*/ flag.  */
10094 CASE_FLT_FN (BUILT_IN_LDEXP):
10095 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10096 CASE_FLT_FN (BUILT_IN_SCALBN):
10097 CASE_FLT_FN (BUILT_IN_SCALBLN):
10098 return fold_builtin_load_exponent (loc, arg0, arg1,
10099 type, /*ldexp=*/false);
10101 CASE_FLT_FN (BUILT_IN_FREXP):
10102 return fold_builtin_frexp (loc, arg0, arg1, type);
10104 CASE_FLT_FN (BUILT_IN_MODF):
10105 return fold_builtin_modf (loc, arg0, arg1, type);
10107 case BUILT_IN_BZERO:
10108 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10110 case BUILT_IN_FPUTS:
10111 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10113 case BUILT_IN_FPUTS_UNLOCKED:
10114 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10116 case BUILT_IN_STRSTR:
10117 return fold_builtin_strstr (loc, arg0, arg1, type);
10119 case BUILT_IN_STRCAT:
10120 return fold_builtin_strcat (loc, arg0, arg1);
10122 case BUILT_IN_STRSPN:
10123 return fold_builtin_strspn (loc, arg0, arg1);
10125 case BUILT_IN_STRCSPN:
10126 return fold_builtin_strcspn (loc, arg0, arg1);
10128 case BUILT_IN_STRCHR:
10129 case BUILT_IN_INDEX:
10130 return fold_builtin_strchr (loc, arg0, arg1, type);
10132 case BUILT_IN_STRRCHR:
10133 case BUILT_IN_RINDEX:
10134 return fold_builtin_strrchr (loc, arg0, arg1, type);
10136 case BUILT_IN_STRCPY:
10137 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
/* When stpcpy's return value is unused it degrades to strcpy, which
   targets usually expand better.  */
10139 case BUILT_IN_STPCPY:
10140 if (ignore)
10142 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10143 if (!fn)
10144 break;
10146 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10148 else
10149 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10150 break;
10152 case BUILT_IN_STRCMP:
10153 return fold_builtin_strcmp (loc, arg0, arg1);
10155 case BUILT_IN_STRPBRK:
10156 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10158 case BUILT_IN_EXPECT:
10159 return fold_builtin_expect (loc, arg0, arg1);
10161 CASE_FLT_FN (BUILT_IN_POW):
10162 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10164 CASE_FLT_FN (BUILT_IN_POWI):
10165 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10167 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10168 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10170 CASE_FLT_FN (BUILT_IN_FMIN):
10171 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10173 CASE_FLT_FN (BUILT_IN_FMAX):
10174 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
/* Each unordered comparison is folded via the code pair giving the
   OPPOSITE result (see fold_builtin_unordered_cmp), e.g. isgreater
   becomes !(unle/le).  */
10176 case BUILT_IN_ISGREATER:
10177 return fold_builtin_unordered_cmp (loc, fndecl,
10178 arg0, arg1, UNLE_EXPR, LE_EXPR);
10179 case BUILT_IN_ISGREATEREQUAL:
10180 return fold_builtin_unordered_cmp (loc, fndecl,
10181 arg0, arg1, UNLT_EXPR, LT_EXPR);
10182 case BUILT_IN_ISLESS:
10183 return fold_builtin_unordered_cmp (loc, fndecl,
10184 arg0, arg1, UNGE_EXPR, GE_EXPR);
10185 case BUILT_IN_ISLESSEQUAL:
10186 return fold_builtin_unordered_cmp (loc, fndecl,
10187 arg0, arg1, UNGT_EXPR, GT_EXPR);
10188 case BUILT_IN_ISLESSGREATER:
10189 return fold_builtin_unordered_cmp (loc, fndecl,
10190 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10191 case BUILT_IN_ISUNORDERED:
10192 return fold_builtin_unordered_cmp (loc, fndecl,
10193 arg0, arg1, UNORDERED_EXPR,
10194 NOP_EXPR);
10196 /* We do the folding for va_start in the expander. */
10197 case BUILT_IN_VA_START:
10198 break;
10200 case BUILT_IN_SPRINTF:
10201 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10203 case BUILT_IN_OBJECT_SIZE:
10204 return fold_builtin_object_size (arg0, arg1);
10206 case BUILT_IN_PRINTF:
10207 case BUILT_IN_PRINTF_UNLOCKED:
10208 case BUILT_IN_VPRINTF:
10209 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
/* For the _chk variants arg0 is the flag operand; only fold when it
   is a side-effect-free integer we can safely drop.  */
10211 case BUILT_IN_PRINTF_CHK:
10212 case BUILT_IN_VPRINTF_CHK:
10213 if (!validate_arg (arg0, INTEGER_TYPE)
10214 || TREE_SIDE_EFFECTS (arg0))
10215 return NULL_TREE;
10216 else
10217 return fold_builtin_printf (loc, fndecl,
10218 arg1, NULL_TREE, ignore, fcode);
10219 break;
10221 case BUILT_IN_FPRINTF:
10222 case BUILT_IN_FPRINTF_UNLOCKED:
10223 case BUILT_IN_VFPRINTF:
10224 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10225 ignore, fcode);
10227 default:
10228 break;
10230 return NULL_TREE;
10233 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10234 and ARG2. IGNORE is true if the result of the function call is ignored.
10235 This function returns NULL_TREE if no simplification was possible. */
10237 static tree
10238 fold_builtin_3 (location_t loc, tree fndecl,
10239 tree arg0, tree arg1, tree arg2, bool ignore)
10241 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10242 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10243 switch (fcode)
10246 CASE_FLT_FN (BUILT_IN_SINCOS):
10247 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10249 CASE_FLT_FN (BUILT_IN_FMA):
10250 if (validate_arg (arg0, REAL_TYPE)
10251 && validate_arg(arg1, REAL_TYPE)
10252 && validate_arg(arg2, REAL_TYPE))
10253 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10254 break;
10256 CASE_FLT_FN (BUILT_IN_REMQUO):
10257 if (validate_arg (arg0, REAL_TYPE)
10258 && validate_arg(arg1, REAL_TYPE)
10259 && validate_arg(arg2, POINTER_TYPE))
10260 return do_mpfr_remquo (arg0, arg1, arg2);
10261 break;
10263 case BUILT_IN_MEMSET:
10264 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10266 case BUILT_IN_BCOPY:
10267 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10268 void_type_node, true, /*endp=*/3);
10270 case BUILT_IN_MEMCPY:
10271 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10272 type, ignore, /*endp=*/0);
10274 case BUILT_IN_MEMPCPY:
10275 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10276 type, ignore, /*endp=*/1);
10278 case BUILT_IN_MEMMOVE:
10279 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10280 type, ignore, /*endp=*/3);
10282 case BUILT_IN_STRNCAT:
10283 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10285 case BUILT_IN_STRNCPY:
10286 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10288 case BUILT_IN_STRNCMP:
10289 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10291 case BUILT_IN_MEMCHR:
10292 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10294 case BUILT_IN_BCMP:
10295 case BUILT_IN_MEMCMP:
10296 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10298 case BUILT_IN_SPRINTF:
10299 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10301 case BUILT_IN_STRCPY_CHK:
10302 case BUILT_IN_STPCPY_CHK:
10303 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10304 ignore, fcode);
10306 case BUILT_IN_STRCAT_CHK:
10307 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10309 case BUILT_IN_PRINTF_CHK:
10310 case BUILT_IN_VPRINTF_CHK:
10311 if (!validate_arg (arg0, INTEGER_TYPE)
10312 || TREE_SIDE_EFFECTS (arg0))
10313 return NULL_TREE;
10314 else
10315 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10316 break;
10318 case BUILT_IN_FPRINTF:
10319 case BUILT_IN_FPRINTF_UNLOCKED:
10320 case BUILT_IN_VFPRINTF:
10321 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10322 ignore, fcode);
10324 case BUILT_IN_FPRINTF_CHK:
10325 case BUILT_IN_VFPRINTF_CHK:
10326 if (!validate_arg (arg1, INTEGER_TYPE)
10327 || TREE_SIDE_EFFECTS (arg1))
10328 return NULL_TREE;
10329 else
10330 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10331 ignore, fcode);
10333 default:
10334 break;
10336 return NULL_TREE;
10339 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10340 ARG2, and ARG3. IGNORE is true if the result of the function call is
10341 ignored. This function returns NULL_TREE if no simplification was
10342 possible. */
10344 static tree
10345 fold_builtin_4 (location_t loc, tree fndecl,
10346 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10348 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10350 switch (fcode)
10352 case BUILT_IN_MEMCPY_CHK:
10353 case BUILT_IN_MEMPCPY_CHK:
10354 case BUILT_IN_MEMMOVE_CHK:
10355 case BUILT_IN_MEMSET_CHK:
10356 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10357 NULL_TREE, ignore,
10358 DECL_FUNCTION_CODE (fndecl));
10360 case BUILT_IN_STRNCPY_CHK:
10361 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10363 case BUILT_IN_STRNCAT_CHK:
10364 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10366 case BUILT_IN_FPRINTF_CHK:
10367 case BUILT_IN_VFPRINTF_CHK:
10368 if (!validate_arg (arg1, INTEGER_TYPE)
10369 || TREE_SIDE_EFFECTS (arg1))
10370 return NULL_TREE;
10371 else
10372 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10373 ignore, fcode);
10374 break;
10376 default:
10377 break;
10379 return NULL_TREE;
10382 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10383 arguments, where NARGS <= 4. IGNORE is true if the result of the
10384 function call is ignored. This function returns NULL_TREE if no
10385 simplification was possible. Note that this only folds builtins with
10386 fixed argument patterns. Foldings that do varargs-to-varargs
10387 transformations, or that match calls with more than 4 arguments,
10388 need to be handled with fold_builtin_varargs instead. */
10390 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10392 static tree
10393 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10395 tree ret = NULL_TREE;
10397 switch (nargs)
10399 case 0:
10400 ret = fold_builtin_0 (loc, fndecl, ignore);
10401 break;
10402 case 1:
10403 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10404 break;
10405 case 2:
10406 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10407 break;
10408 case 3:
10409 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10410 break;
10411 case 4:
10412 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10413 ignore);
10414 break;
10415 default:
10416 break;
10418 if (ret)
10420 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10421 SET_EXPR_LOCATION (ret, loc);
10422 TREE_NO_WARNING (ret) = 1;
10423 return ret;
10425 return NULL_TREE;
10428 /* Builtins with folding operations that operate on "..." arguments
10429 need special handling; we need to store the arguments in a convenient
10430 data structure before attempting any folding. Fortunately there are
10431 only a few builtins that fall into this category. FNDECL is the
10432 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10433 result of the function call is ignored. */
10435 static tree
10436 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10437 bool ignore ATTRIBUTE_UNUSED)
10439 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10440 tree ret = NULL_TREE;
10442 switch (fcode)
10444 case BUILT_IN_SPRINTF_CHK:
10445 case BUILT_IN_VSPRINTF_CHK:
10446 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10447 break;
10449 case BUILT_IN_SNPRINTF_CHK:
10450 case BUILT_IN_VSNPRINTF_CHK:
10451 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10452 break;
10454 case BUILT_IN_FPCLASSIFY:
10455 ret = fold_builtin_fpclassify (loc, exp);
10456 break;
10458 default:
10459 break;
10461 if (ret)
10463 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10464 SET_EXPR_LOCATION (ret, loc);
10465 TREE_NO_WARNING (ret) = 1;
10466 return ret;
10468 return NULL_TREE;
10471 /* Return true if FNDECL shouldn't be folded right now.
10472 If a built-in function has an inline attribute always_inline
10473 wrapper, defer folding it after always_inline functions have
10474 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10475 might not be performed. */
10477 static bool
10478 avoid_folding_inline_builtin (tree fndecl)
10480 return (DECL_DECLARED_INLINE_P (fndecl)
10481 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10482 && cfun
10483 && !cfun->always_inline_functions_inlined
10484 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10487 /* A wrapper function for builtin folding that prevents warnings for
10488 "statement without effect" and the like, caused by removing the
10489 call node earlier than the warning is generated.  */
10491 tree
10492 fold_call_expr (location_t loc, tree exp, bool ignore)
10494 tree ret = NULL_TREE;
10495 tree fndecl = get_callee_fndecl (exp);
10496 if (fndecl
10497 && TREE_CODE (fndecl) == FUNCTION_DECL
10498 && DECL_BUILT_IN (fndecl)
10499 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10500 yet.  Defer folding until we see all the arguments
10501 (after inlining).  */
10502 && !CALL_EXPR_VA_ARG_PACK (exp))
10504 int nargs = call_expr_nargs (exp);
10506 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10507 instead last argument is __builtin_va_arg_pack ().  Defer folding
10508 even in that case, until arguments are finalized.  */
10509 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10511 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10512 if (fndecl2
10513 && TREE_CODE (fndecl2) == FUNCTION_DECL
10514 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10515 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10516 return NULL_TREE;
/* Built-ins wrapped by an always_inline function are folded only
   after inlining; see avoid_folding_inline_builtin.  */
10519 if (avoid_folding_inline_builtin (fndecl))
10520 return NULL_TREE;
10522 /* FIXME: Don't use a list in this interface.  */
10523 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10524 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10525 else
/* Try the fixed-arity folders first, then fall back to the
   varargs-style folders which need the whole CALL_EXPR.  */
10527 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10529 tree *args = CALL_EXPR_ARGP (exp);
10530 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10532 if (!ret)
10533 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10534 if (ret)
10535 return ret;
10538 return NULL_TREE;
10541 /* Conveniently construct a function call expression. FNDECL names the
10542 function to be called and ARGLIST is a TREE_LIST of arguments. */
10544 tree
10545 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10547 tree fntype = TREE_TYPE (fndecl);
10548 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10549 int n = list_length (arglist);
10550 tree *argarray = (tree *) alloca (n * sizeof (tree));
10551 int i;
10553 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10554 argarray[i] = TREE_VALUE (arglist);
10555 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10558 /* Conveniently construct a function call expression. FNDECL names the
10559 function to be called, N is the number of arguments, and the "..."
10560 parameters are the argument expressions. */
10562 tree
10563 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10565 va_list ap;
10566 tree fntype = TREE_TYPE (fndecl);
10567 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10568 tree *argarray = (tree *) alloca (n * sizeof (tree));
10569 int i;
10571 va_start (ap, n);
10572 for (i = 0; i < n; i++)
10573 argarray[i] = va_arg (ap, tree);
10574 va_end (ap);
10575 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10578 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10579 N arguments are passed in the array ARGARRAY.  */
10581 tree
10582 fold_builtin_call_array (location_t loc, tree type,
10583 tree fn,
10584 int n,
10585 tree *argarray)
10587 tree ret = NULL_TREE;
10588 int i;
10589 tree exp;
10591 if (TREE_CODE (fn) == ADDR_EXPR)
10593 tree fndecl = TREE_OPERAND (fn, 0);
10594 if (TREE_CODE (fndecl) == FUNCTION_DECL
10595 && DECL_BUILT_IN (fndecl))
10597 /* If last argument is __builtin_va_arg_pack (), arguments to this
10598 function are not finalized yet.  Defer folding until they are.  */
10599 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10601 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10602 if (fndecl2
10603 && TREE_CODE (fndecl2) == FUNCTION_DECL
10604 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10605 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10606 return build_call_array_loc (loc, type, fn, n, argarray);
/* Defer folding of always_inline builtin wrappers until after
   inlining; build the plain call instead.  */
10608 if (avoid_folding_inline_builtin (fndecl))
10609 return build_call_array_loc (loc, type, fn, n, argarray);
10610 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
/* The target hook still takes a TREE_LIST; build one back-to-front.  */
10612 tree arglist = NULL_TREE;
10613 for (i = n - 1; i >= 0; i--)
10614 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10615 ret = targetm.fold_builtin (fndecl, arglist, false);
10616 if (ret)
10617 return ret;
10618 return build_call_array_loc (loc, type, fn, n, argarray);
10620 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10622 /* First try the transformations that don't require consing up
10623 an exp.  */
10624 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10625 if (ret)
10626 return ret;
10629 /* If we got this far, we need to build an exp.  */
10630 exp = build_call_array_loc (loc, type, fn, n, argarray);
10631 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10632 return ret ? ret : exp;
/* Not a recognizable built-in: just build the call.  */
10636 return build_call_array_loc (loc, type, fn, n, argarray);
10639 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10640 along with N new arguments specified as the "..." parameters.  SKIP
10641 is the number of arguments in EXP to be omitted.  This function is used
10642 to do varargs-to-varargs transformations.  */
10644 static tree
10645 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10647 int oldnargs = call_expr_nargs (exp);
10648 int nargs = oldnargs - skip + n;
10649 tree fntype = TREE_TYPE (fndecl);
10650 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10651 tree *buffer;
10653 if (n > 0)
10655 int i, j;
10656 va_list ap;
/* Copy the N new leading arguments, then the surviving tail of EXP's
   argument vector, into a fresh stack buffer.  */
10658 buffer = XALLOCAVEC (tree, nargs);
10659 va_start (ap, n);
10660 for (i = 0; i < n; i++)
10661 buffer[i] = va_arg (ap, tree);
10662 va_end (ap);
10663 for (j = skip; j < oldnargs; j++, i++)
10664 buffer[i] = CALL_EXPR_ARG (exp, j);
10666 else
/* No new arguments: point directly into EXP's own argument vector
   (no copy is made in this case).  */
10667 buffer = CALL_EXPR_ARGP (exp) + skip;
10669 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10672 /* Validate a single argument ARG against a tree code CODE representing
10673 a type. */
10675 static bool
10676 validate_arg (const_tree arg, enum tree_code code)
10678 if (!arg)
10679 return false;
10680 else if (code == POINTER_TYPE)
10681 return POINTER_TYPE_P (TREE_TYPE (arg));
10682 else if (code == INTEGER_TYPE)
10683 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10684 return code == TREE_CODE (TREE_TYPE (arg));
10687 /* This function validates the types of a function call argument list
10688 against a specified list of tree_codes.  If the last specifier is a 0,
10689 that represents an ellipses, otherwise the last specifier must be a
10690 VOID_TYPE.
10692 This is the GIMPLE version of validate_arglist.  Eventually we want to
10693 completely convert builtins.c to work from GIMPLEs and the tree based
10694 validate_arglist will then be removed.  */
10696 bool
10697 validate_gimple_arglist (const_gimple call, ...)
10699 enum tree_code code;
10700 bool res = 0;
10701 va_list ap;
10702 const_tree arg;
10703 size_t i;
10705 va_start (ap, call);
10706 i = 0;
/* Walk the specifier list in parallel with the call's arguments.
   enum tree_code promotes to int through "...", so read an int back.  */
10710 code = (enum tree_code) va_arg (ap, int);
10711 switch (code)
10713 case 0:
10714 /* This signifies an ellipses, any further arguments are all ok.  */
10715 res = true;
10716 goto end;
10717 case VOID_TYPE:
10718 /* This signifies an endlink, if no arguments remain, return
10719 true, otherwise return false.  */
10720 res = (i == gimple_call_num_args (call));
10721 goto end;
10722 default:
10723 /* If no parameters remain or the parameter's code does not
10724 match the specified code, return false.  Otherwise continue
10725 checking any remaining arguments.  */
10726 arg = gimple_call_arg (call, i++);
10727 if (!validate_arg (arg, code))
10728 goto end;
10729 break;
10732 while (1);
10734 /* We need gotos here since we can only have one VA_CLOSE in a
10735 function.  */
10736 end: ;
10737 va_end (ap);
10739 return res;
10742 /* This function validates the types of a function call argument list
10743 against a specified list of tree_codes.  If the last specifier is a 0,
10744 that represents an ellipses, otherwise the last specifier must be a
10745 VOID_TYPE.  */
10747 bool
10748 validate_arglist (const_tree callexpr, ...)
10750 enum tree_code code;
10751 bool res = 0;
10752 va_list ap;
10753 const_call_expr_arg_iterator iter;
10754 const_tree arg;
10756 va_start (ap, callexpr);
10757 init_const_call_expr_arg_iterator (callexpr, &iter);
/* Walk the specifier list alongside the CALL_EXPR's arguments; the
   tree_code values arrive promoted to int through the "..." list.  */
10761 code = (enum tree_code) va_arg (ap, int);
10762 switch (code)
10764 case 0:
10765 /* This signifies an ellipses, any further arguments are all ok.  */
10766 res = true;
10767 goto end;
10768 case VOID_TYPE:
10769 /* This signifies an endlink, if no arguments remain, return
10770 true, otherwise return false.  */
10771 res = !more_const_call_expr_args_p (&iter);
10772 goto end;
10773 default:
10774 /* If no parameters remain or the parameter's code does not
10775 match the specified code, return false.  Otherwise continue
10776 checking any remaining arguments.  */
10777 arg = next_const_call_expr_arg (&iter);
10778 if (!validate_arg (arg, code))
10779 goto end;
10780 break;
10783 while (1);
10785 /* We need gotos here since we can only have one VA_CLOSE in a
10786 function.  */
10787 end: ;
10788 va_end (ap);
10790 return res;
10793 /* Default target-specific builtin expander that does nothing.  */
/* NOTE(review): the return-type line (presumably "rtx" at the original
   file's line 10795) appears to have been lost in extraction — confirm
   against the upstream source.  Returning NULL_RTX here signals that no
   insns were emitted; presumably the caller falls back to generic
   expansion — verify in expand_builtin.  */
10796 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10797 rtx target ATTRIBUTE_UNUSED,
10798 rtx subtarget ATTRIBUTE_UNUSED,
10799 enum machine_mode mode ATTRIBUTE_UNUSED,
10800 int ignore ATTRIBUTE_UNUSED)
10802 return NULL_RTX;
10805 /* Returns true is EXP represents data that would potentially reside
10806 in a readonly section. */
10808 static bool
10809 readonly_data_expr (tree exp)
10811 STRIP_NOPS (exp);
10813 if (TREE_CODE (exp) != ADDR_EXPR)
10814 return false;
10816 exp = get_base_address (TREE_OPERAND (exp, 0));
10817 if (!exp)
10818 return false;
10820 /* Make sure we call decl_readonly_section only for trees it
10821 can handle (since it returns true for everything it doesn't
10822 understand). */
10823 if (TREE_CODE (exp) == STRING_CST
10824 || TREE_CODE (exp) == CONSTRUCTOR
10825 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10826 return decl_readonly_section (exp, 0);
10827 else
10828 return false;
10831 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10832 to the call, and TYPE is its return type.
10834 Return NULL_TREE if no simplification was possible, otherwise return the
10835 simplified form of the call as a tree.
10837 The simplified form may be a constant or other expression which
10838 computes the same value, but in a more efficient manner (including
10839 calls to other builtin functions).
10841 The call may contain arguments which need to be evaluated, but
10842 which are not useful to determine the result of the call. In
10843 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10844 COMPOUND_EXPR will be an argument which must be evaluated.
10845 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10846 COMPOUND_EXPR in the chain will contain the tree for the simplified
10847 form of the builtin function call. */
10849 static tree
10850 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10852 if (!validate_arg (s1, POINTER_TYPE)
10853 || !validate_arg (s2, POINTER_TYPE))
10854 return NULL_TREE;
10855 else
10857 tree fn;
10858 const char *p1, *p2;
10860 p2 = c_getstr (s2);
10861 if (p2 == NULL)
10862 return NULL_TREE;
10864 p1 = c_getstr (s1);
10865 if (p1 != NULL)
10867 const char *r = strstr (p1, p2);
10868 tree tem;
10870 if (r == NULL)
10871 return build_int_cst (TREE_TYPE (s1), 0);
10873 /* Return an offset into the constant string argument. */
10874 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10875 s1, size_int (r - p1));
10876 return fold_convert_loc (loc, type, tem);
10879 /* The argument is const char *, and the result is char *, so we need
10880 a type conversion here to avoid a warning. */
10881 if (p2[0] == '\0')
10882 return fold_convert_loc (loc, type, s1);
10884 if (p2[1] != '\0')
10885 return NULL_TREE;
10887 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10888 if (!fn)
10889 return NULL_TREE;
10891 /* New argument list transforming strstr(s1, s2) to
10892 strchr(s1, s2[0]). */
10893 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10897 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10898 the call, and TYPE is its return type.
10900 Return NULL_TREE if no simplification was possible, otherwise return the
10901 simplified form of the call as a tree.
10903 The simplified form may be a constant or other expression which
10904 computes the same value, but in a more efficient manner (including
10905 calls to other builtin functions).
10907 The call may contain arguments which need to be evaluated, but
10908 which are not useful to determine the result of the call. In
10909 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10910 COMPOUND_EXPR will be an argument which must be evaluated.
10911 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10912 COMPOUND_EXPR in the chain will contain the tree for the simplified
10913 form of the builtin function call. */
10915 static tree
10916 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10918 if (!validate_arg (s1, POINTER_TYPE)
10919 || !validate_arg (s2, INTEGER_TYPE))
10920 return NULL_TREE;
10921 else
10923 const char *p1;
10925 if (TREE_CODE (s2) != INTEGER_CST)
10926 return NULL_TREE;
10928 p1 = c_getstr (s1);
10929 if (p1 != NULL)
10931 char c;
10932 const char *r;
10933 tree tem;
10935 if (target_char_cast (s2, &c))
10936 return NULL_TREE;
10938 r = strchr (p1, c);
10940 if (r == NULL)
10941 return build_int_cst (TREE_TYPE (s1), 0);
10943 /* Return an offset into the constant string argument. */
10944 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10945 s1, size_int (r - p1));
10946 return fold_convert_loc (loc, type, tem);
10948 return NULL_TREE;
10952 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10953 the call, and TYPE is its return type.
10955 Return NULL_TREE if no simplification was possible, otherwise return the
10956 simplified form of the call as a tree.
10958 The simplified form may be a constant or other expression which
10959 computes the same value, but in a more efficient manner (including
10960 calls to other builtin functions).
10962 The call may contain arguments which need to be evaluated, but
10963 which are not useful to determine the result of the call. In
10964 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10965 COMPOUND_EXPR will be an argument which must be evaluated.
10966 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10967 COMPOUND_EXPR in the chain will contain the tree for the simplified
10968 form of the builtin function call. */
10970 static tree
10971 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10973 if (!validate_arg (s1, POINTER_TYPE)
10974 || !validate_arg (s2, INTEGER_TYPE))
10975 return NULL_TREE;
10976 else
10978 tree fn;
10979 const char *p1;
10981 if (TREE_CODE (s2) != INTEGER_CST)
10982 return NULL_TREE;
10984 p1 = c_getstr (s1);
10985 if (p1 != NULL)
10987 char c;
10988 const char *r;
10989 tree tem;
10991 if (target_char_cast (s2, &c))
10992 return NULL_TREE;
10994 r = strrchr (p1, c);
10996 if (r == NULL)
10997 return build_int_cst (TREE_TYPE (s1), 0);
10999 /* Return an offset into the constant string argument. */
11000 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11001 s1, size_int (r - p1));
11002 return fold_convert_loc (loc, type, tem);
11005 if (! integer_zerop (s2))
11006 return NULL_TREE;
11008 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11009 if (!fn)
11010 return NULL_TREE;
11012 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11013 return build_call_expr_loc (loc, fn, 2, s1, s2);
11017 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11018 to the call, and TYPE is its return type.
11020 Return NULL_TREE if no simplification was possible, otherwise return the
11021 simplified form of the call as a tree.
11023 The simplified form may be a constant or other expression which
11024 computes the same value, but in a more efficient manner (including
11025 calls to other builtin functions).
11027 The call may contain arguments which need to be evaluated, but
11028 which are not useful to determine the result of the call. In
11029 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11030 COMPOUND_EXPR will be an argument which must be evaluated.
11031 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11032 COMPOUND_EXPR in the chain will contain the tree for the simplified
11033 form of the builtin function call. */
11035 static tree
11036 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11038 if (!validate_arg (s1, POINTER_TYPE)
11039 || !validate_arg (s2, POINTER_TYPE))
11040 return NULL_TREE;
11041 else
11043 tree fn;
11044 const char *p1, *p2;
11046 p2 = c_getstr (s2);
11047 if (p2 == NULL)
11048 return NULL_TREE;
11050 p1 = c_getstr (s1);
11051 if (p1 != NULL)
11053 const char *r = strpbrk (p1, p2);
11054 tree tem;
11056 if (r == NULL)
11057 return build_int_cst (TREE_TYPE (s1), 0);
11059 /* Return an offset into the constant string argument. */
11060 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11061 s1, size_int (r - p1));
11062 return fold_convert_loc (loc, type, tem);
11065 if (p2[0] == '\0')
11066 /* strpbrk(x, "") == NULL.
11067 Evaluate and ignore s1 in case it had side-effects. */
11068 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11070 if (p2[1] != '\0')
11071 return NULL_TREE; /* Really call strpbrk. */
11073 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11074 if (!fn)
11075 return NULL_TREE;
11077 /* New argument list transforming strpbrk(s1, s2) to
11078 strchr(s1, s2[0]). */
11079 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11083 /* Simplify a call to the strcat builtin.  DST and SRC are the arguments
11084 to the call.
11086 Return NULL_TREE if no simplification was possible, otherwise return the
11087 simplified form of the call as a tree.
11089 The simplified form may be a constant or other expression which
11090 computes the same value, but in a more efficient manner (including
11091 calls to other builtin functions).
11093 The call may contain arguments which need to be evaluated, but
11094 which are not useful to determine the result of the call.  In
11095 this case we return a chain of COMPOUND_EXPRs.  The LHS of each
11096 COMPOUND_EXPR will be an argument which must be evaluated.
11097 COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
11098 COMPOUND_EXPR in the chain will contain the tree for the simplified
11099 form of the builtin function call.  */
11101 static tree
11102 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11104 if (!validate_arg (dst, POINTER_TYPE)
11105 || !validate_arg (src, POINTER_TYPE))
11106 return NULL_TREE;
11107 else
11109 const char *p = c_getstr (src);
11111 /* If the string length is zero, return the dst parameter.  */
11112 if (p && *p == '\0')
11113 return dst;
/* The expansion below is larger than a plain call, so only apply it
   when optimizing this insn for speed.  */
11115 if (optimize_insn_for_speed_p ())
11117 /* See if we can store by pieces into (dst + strlen(dst)).  */
11118 tree newdst, call;
11119 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11120 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11122 if (!strlen_fn || !strcpy_fn)
11123 return NULL_TREE;
11125 /* If we don't have a movstr we don't want to emit an strcpy
11126 call.  We have to do that if the length of the source string
11127 isn't computable (in that case we can use memcpy probably
11128 later expanding to a sequence of mov instructions).  If we
11129 have movstr instructions we can emit strcpy calls.  */
11130 if (!HAVE_movstr)
11132 tree len = c_strlen (src, 1);
11133 if (! len || TREE_SIDE_EFFECTS (len))
11134 return NULL_TREE;
11137 /* Stabilize the argument list.  */
11138 dst = builtin_save_expr (dst);
11140 /* Create strlen (dst).  */
11141 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11142 /* Create (dst p+ strlen (dst)).  */
11144 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11145 TREE_TYPE (dst), dst, newdst);
11146 newdst = builtin_save_expr (newdst);
11148 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
/* strcat's value is DST: sequence the strcpy call, then yield DST.  */
11149 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11151 return NULL_TREE;
11155 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11156 arguments to the call.
11158 Return NULL_TREE if no simplification was possible, otherwise return the
11159 simplified form of the call as a tree.
11161 The simplified form may be a constant or other expression which
11162 computes the same value, but in a more efficient manner (including
11163 calls to other builtin functions).
11165 The call may contain arguments which need to be evaluated, but
11166 which are not useful to determine the result of the call. In
11167 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11168 COMPOUND_EXPR will be an argument which must be evaluated.
11169 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11170 COMPOUND_EXPR in the chain will contain the tree for the simplified
11171 form of the builtin function call. */
11173 static tree
11174 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11176 if (!validate_arg (dst, POINTER_TYPE)
11177 || !validate_arg (src, POINTER_TYPE)
11178 || !validate_arg (len, INTEGER_TYPE))
11179 return NULL_TREE;
11180 else
11182 const char *p = c_getstr (src);
11184 /* If the requested length is zero, or the src parameter string
11185 length is zero, return the dst parameter. */
11186 if (integer_zerop (len) || (p && *p == '\0'))
11187 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11189 /* If the requested len is greater than or equal to the string
11190 length, call strcat. */
11191 if (TREE_CODE (len) == INTEGER_CST && p
11192 && compare_tree_int (len, strlen (p)) >= 0)
11194 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11196 /* If the replacement _DECL isn't initialized, don't do the
11197 transformation. */
11198 if (!fn)
11199 return NULL_TREE;
11201 return build_call_expr_loc (loc, fn, 2, dst, src);
11203 return NULL_TREE;
11207 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11208 to the call.
11210 Return NULL_TREE if no simplification was possible, otherwise return the
11211 simplified form of the call as a tree.
11213 The simplified form may be a constant or other expression which
11214 computes the same value, but in a more efficient manner (including
11215 calls to other builtin functions).
11217 The call may contain arguments which need to be evaluated, but
11218 which are not useful to determine the result of the call. In
11219 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11220 COMPOUND_EXPR will be an argument which must be evaluated.
11221 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11222 COMPOUND_EXPR in the chain will contain the tree for the simplified
11223 form of the builtin function call. */
11225 static tree
11226 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11228 if (!validate_arg (s1, POINTER_TYPE)
11229 || !validate_arg (s2, POINTER_TYPE))
11230 return NULL_TREE;
11231 else
11233 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11235 /* If both arguments are constants, evaluate at compile-time. */
11236 if (p1 && p2)
11238 const size_t r = strspn (p1, p2);
11239 return size_int (r);
11242 /* If either argument is "", return NULL_TREE. */
11243 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11244 /* Evaluate and ignore both arguments in case either one has
11245 side-effects. */
11246 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11247 s1, s2);
11248 return NULL_TREE;
11252 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11253 to the call.
11255 Return NULL_TREE if no simplification was possible, otherwise return the
11256 simplified form of the call as a tree.
11258 The simplified form may be a constant or other expression which
11259 computes the same value, but in a more efficient manner (including
11260 calls to other builtin functions).
11262 The call may contain arguments which need to be evaluated, but
11263 which are not useful to determine the result of the call. In
11264 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11265 COMPOUND_EXPR will be an argument which must be evaluated.
11266 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11267 COMPOUND_EXPR in the chain will contain the tree for the simplified
11268 form of the builtin function call. */
11270 static tree
11271 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11273 if (!validate_arg (s1, POINTER_TYPE)
11274 || !validate_arg (s2, POINTER_TYPE))
11275 return NULL_TREE;
11276 else
11278 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11280 /* If both arguments are constants, evaluate at compile-time. */
11281 if (p1 && p2)
11283 const size_t r = strcspn (p1, p2);
11284 return size_int (r);
11287 /* If the first argument is "", return NULL_TREE. */
11288 if (p1 && *p1 == '\0')
11290 /* Evaluate and ignore argument s2 in case it has
11291 side-effects. */
11292 return omit_one_operand_loc (loc, size_type_node,
11293 size_zero_node, s2);
11296 /* If the second argument is "", return __builtin_strlen(s1). */
11297 if (p2 && *p2 == '\0')
11299 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11301 /* If the replacement _DECL isn't initialized, don't do the
11302 transformation. */
11303 if (!fn)
11304 return NULL_TREE;
11306 return build_call_expr_loc (loc, fn, 1, s1);
11308 return NULL_TREE;
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11319 tree
11320 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11321 bool ignore, bool unlocked, tree len)
11323 /* If we're using an unlocked function, assume the other unlocked
11324 functions exist explicitly. */
11325 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11326 : implicit_built_in_decls[BUILT_IN_FPUTC];
11327 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11328 : implicit_built_in_decls[BUILT_IN_FWRITE];
11330 /* If the return value is used, don't do the transformation. */
11331 if (!ignore)
11332 return NULL_TREE;
11334 /* Verify the arguments in the original call. */
11335 if (!validate_arg (arg0, POINTER_TYPE)
11336 || !validate_arg (arg1, POINTER_TYPE))
11337 return NULL_TREE;
11339 if (! len)
11340 len = c_strlen (arg0, 0);
11342 /* Get the length of the string passed to fputs. If the length
11343 can't be determined, punt. */
11344 if (!len
11345 || TREE_CODE (len) != INTEGER_CST)
11346 return NULL_TREE;
11348 switch (compare_tree_int (len, 1))
11350 case -1: /* length is 0, delete the call entirely . */
11351 return omit_one_operand_loc (loc, integer_type_node,
11352 integer_zero_node, arg1);;
11354 case 0: /* length is 1, call fputc. */
11356 const char *p = c_getstr (arg0);
11358 if (p != NULL)
11360 if (fn_fputc)
11361 return build_call_expr_loc (loc, fn_fputc, 2,
11362 build_int_cst (NULL_TREE, p[0]), arg1);
11363 else
11364 return NULL_TREE;
11367 /* FALLTHROUGH */
11368 case 1: /* length is greater than 1, call fwrite. */
11370 /* If optimizing for size keep fputs. */
11371 if (optimize_function_for_size_p (cfun))
11372 return NULL_TREE;
11373 /* New argument list transforming fputs(string, stream) to
11374 fwrite(string, 1, len, stream). */
11375 if (fn_fwrite)
11376 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11377 size_one_node, len, arg1);
11378 else
11379 return NULL_TREE;
11381 default:
11382 gcc_unreachable ();
11384 return NULL_TREE;
11387 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11388 produced. False otherwise. This is done so that we don't output the error
11389 or warning twice or three times. */
11391 bool
11392 fold_builtin_next_arg (tree exp, bool va_start_p)
11394 tree fntype = TREE_TYPE (current_function_decl);
11395 int nargs = call_expr_nargs (exp);
11396 tree arg;
11398 if (TYPE_ARG_TYPES (fntype) == 0
11399 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11400 == void_type_node))
11402 error ("%<va_start%> used in function with fixed args");
11403 return true;
11406 if (va_start_p)
11408 if (va_start_p && (nargs != 2))
11410 error ("wrong number of arguments to function %<va_start%>");
11411 return true;
11413 arg = CALL_EXPR_ARG (exp, 1);
11415 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11416 when we checked the arguments and if needed issued a warning. */
11417 else
11419 if (nargs == 0)
11421 /* Evidently an out of date version of <stdarg.h>; can't validate
11422 va_start's second argument, but can still work as intended. */
11423 warning (0, "%<__builtin_next_arg%> called without an argument");
11424 return true;
11426 else if (nargs > 1)
11428 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11429 return true;
11431 arg = CALL_EXPR_ARG (exp, 0);
11434 if (TREE_CODE (arg) == SSA_NAME)
11435 arg = SSA_NAME_VAR (arg);
11437 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11438 or __builtin_next_arg (0) the first time we see it, after checking
11439 the arguments and if needed issuing a warning. */
11440 if (!integer_zerop (arg))
11442 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11444 /* Strip off all nops for the sake of the comparison. This
11445 is not quite the same as STRIP_NOPS. It does more.
11446 We must also strip off INDIRECT_EXPR for C++ reference
11447 parameters. */
11448 while (CONVERT_EXPR_P (arg)
11449 || TREE_CODE (arg) == INDIRECT_REF)
11450 arg = TREE_OPERAND (arg, 0);
11451 if (arg != last_parm)
11453 /* FIXME: Sometimes with the tree optimizers we can get the
11454 not the last argument even though the user used the last
11455 argument. We just warn and set the arg to be the last
11456 argument so that we will get wrong-code because of
11457 it. */
11458 warning (0, "second parameter of %<va_start%> not last named argument");
11461 /* Undefined by C99 7.15.1.4p4 (va_start):
11462 "If the parameter parmN is declared with the register storage
11463 class, with a function or array type, or with a type that is
11464 not compatible with the type that results after application of
11465 the default argument promotions, the behavior is undefined."
11467 else if (DECL_REGISTER (arg))
11468 warning (0, "undefined behaviour when second parameter of "
11469 "%<va_start%> is declared with %<register%> storage");
11471 /* We want to verify the second parameter just once before the tree
11472 optimizers are run and then avoid keeping it in the tree,
11473 as otherwise we could warn even for correct code like:
11474 void foo (int i, ...)
11475 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11476 if (va_start_p)
11477 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11478 else
11479 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11481 return false;
11485 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11486 ORIG may be null if this is a 2-argument call. We don't attempt to
11487 simplify calls with more than 3 arguments.
11489 Return NULL_TREE if no simplification was possible, otherwise return the
11490 simplified form of the call as a tree. If IGNORED is true, it means that
11491 the caller does not use the returned value of the function. */
11493 static tree
11494 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11495 tree orig, int ignored)
11497 tree call, retval;
11498 const char *fmt_str = NULL;
11500 /* Verify the required arguments in the original call. We deal with two
11501 types of sprintf() calls: 'sprintf (str, fmt)' and
11502 'sprintf (dest, "%s", orig)'. */
11503 if (!validate_arg (dest, POINTER_TYPE)
11504 || !validate_arg (fmt, POINTER_TYPE))
11505 return NULL_TREE;
11506 if (orig && !validate_arg (orig, POINTER_TYPE))
11507 return NULL_TREE;
11509 /* Check whether the format is a literal string constant. */
11510 fmt_str = c_getstr (fmt);
11511 if (fmt_str == NULL)
11512 return NULL_TREE;
11514 call = NULL_TREE;
11515 retval = NULL_TREE;
11517 if (!init_target_chars ())
11518 return NULL_TREE;
11520 /* If the format doesn't contain % args or %%, use strcpy. */
11521 if (strchr (fmt_str, target_percent) == NULL)
11523 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11525 if (!fn)
11526 return NULL_TREE;
11528 /* Don't optimize sprintf (buf, "abc", ptr++). */
11529 if (orig)
11530 return NULL_TREE;
11532 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11533 'format' is known to contain no % formats. */
11534 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11535 if (!ignored)
11536 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11539 /* If the format is "%s", use strcpy if the result isn't used. */
11540 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11542 tree fn;
11543 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11545 if (!fn)
11546 return NULL_TREE;
11548 /* Don't crash on sprintf (str1, "%s"). */
11549 if (!orig)
11550 return NULL_TREE;
11552 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11553 if (!ignored)
11555 retval = c_strlen (orig, 1);
11556 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11557 return NULL_TREE;
11559 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11562 if (call && retval)
11564 retval = fold_convert_loc
11565 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11566 retval);
11567 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11569 else
11570 return call;
11573 /* Expand a call EXP to __builtin_object_size. */
11576 expand_builtin_object_size (tree exp)
11578 tree ost;
11579 int object_size_type;
11580 tree fndecl = get_callee_fndecl (exp);
11582 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11584 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11585 exp, fndecl);
11586 expand_builtin_trap ();
11587 return const0_rtx;
11590 ost = CALL_EXPR_ARG (exp, 1);
11591 STRIP_NOPS (ost);
11593 if (TREE_CODE (ost) != INTEGER_CST
11594 || tree_int_cst_sgn (ost) < 0
11595 || compare_tree_int (ost, 3) > 0)
11597 error ("%Klast argument of %D is not integer constant between 0 and 3",
11598 exp, fndecl);
11599 expand_builtin_trap ();
11600 return const0_rtx;
11603 object_size_type = tree_low_cst (ost, 0);
11605 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11608 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11609 FCODE is the BUILT_IN_* to use.
11610 Return NULL_RTX if we failed; the caller should emit a normal call,
11611 otherwise try to get the result in TARGET, if convenient (and in
11612 mode MODE if that's convenient). */
11614 static rtx
11615 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11616 enum built_in_function fcode)
11618 tree dest, src, len, size;
11620 if (!validate_arglist (exp,
11621 POINTER_TYPE,
11622 fcode == BUILT_IN_MEMSET_CHK
11623 ? INTEGER_TYPE : POINTER_TYPE,
11624 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11625 return NULL_RTX;
11627 dest = CALL_EXPR_ARG (exp, 0);
11628 src = CALL_EXPR_ARG (exp, 1);
11629 len = CALL_EXPR_ARG (exp, 2);
11630 size = CALL_EXPR_ARG (exp, 3);
11632 if (! host_integerp (size, 1))
11633 return NULL_RTX;
11635 if (host_integerp (len, 1) || integer_all_onesp (size))
11637 tree fn;
11639 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11641 warning_at (tree_nonartificial_location (exp),
11642 0, "%Kcall to %D will always overflow destination buffer",
11643 exp, get_callee_fndecl (exp));
11644 return NULL_RTX;
11647 fn = NULL_TREE;
11648 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11649 mem{cpy,pcpy,move,set} is available. */
11650 switch (fcode)
11652 case BUILT_IN_MEMCPY_CHK:
11653 fn = built_in_decls[BUILT_IN_MEMCPY];
11654 break;
11655 case BUILT_IN_MEMPCPY_CHK:
11656 fn = built_in_decls[BUILT_IN_MEMPCPY];
11657 break;
11658 case BUILT_IN_MEMMOVE_CHK:
11659 fn = built_in_decls[BUILT_IN_MEMMOVE];
11660 break;
11661 case BUILT_IN_MEMSET_CHK:
11662 fn = built_in_decls[BUILT_IN_MEMSET];
11663 break;
11664 default:
11665 break;
11668 if (! fn)
11669 return NULL_RTX;
11671 fn = build_call_nofold (fn, 3, dest, src, len);
11672 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11673 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11674 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11676 else if (fcode == BUILT_IN_MEMSET_CHK)
11677 return NULL_RTX;
11678 else
11680 unsigned int dest_align
11681 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11683 /* If DEST is not a pointer type, call the normal function. */
11684 if (dest_align == 0)
11685 return NULL_RTX;
11687 /* If SRC and DEST are the same (and not volatile), do nothing. */
11688 if (operand_equal_p (src, dest, 0))
11690 tree expr;
11692 if (fcode != BUILT_IN_MEMPCPY_CHK)
11694 /* Evaluate and ignore LEN in case it has side-effects. */
11695 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11696 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11699 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11700 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11703 /* __memmove_chk special case. */
11704 if (fcode == BUILT_IN_MEMMOVE_CHK)
11706 unsigned int src_align
11707 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11709 if (src_align == 0)
11710 return NULL_RTX;
11712 /* If src is categorized for a readonly section we can use
11713 normal __memcpy_chk. */
11714 if (readonly_data_expr (src))
11716 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11717 if (!fn)
11718 return NULL_RTX;
11719 fn = build_call_nofold (fn, 4, dest, src, len, size);
11720 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11721 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11722 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11725 return NULL_RTX;
11729 /* Emit warning if a buffer overflow is detected at compile time. */
11731 static void
11732 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11734 int is_strlen = 0;
11735 tree len, size;
11736 location_t loc = tree_nonartificial_location (exp);
11738 switch (fcode)
11740 case BUILT_IN_STRCPY_CHK:
11741 case BUILT_IN_STPCPY_CHK:
11742 /* For __strcat_chk the warning will be emitted only if overflowing
11743 by at least strlen (dest) + 1 bytes. */
11744 case BUILT_IN_STRCAT_CHK:
11745 len = CALL_EXPR_ARG (exp, 1);
11746 size = CALL_EXPR_ARG (exp, 2);
11747 is_strlen = 1;
11748 break;
11749 case BUILT_IN_STRNCAT_CHK:
11750 case BUILT_IN_STRNCPY_CHK:
11751 len = CALL_EXPR_ARG (exp, 2);
11752 size = CALL_EXPR_ARG (exp, 3);
11753 break;
11754 case BUILT_IN_SNPRINTF_CHK:
11755 case BUILT_IN_VSNPRINTF_CHK:
11756 len = CALL_EXPR_ARG (exp, 1);
11757 size = CALL_EXPR_ARG (exp, 3);
11758 break;
11759 default:
11760 gcc_unreachable ();
11763 if (!len || !size)
11764 return;
11766 if (! host_integerp (size, 1) || integer_all_onesp (size))
11767 return;
11769 if (is_strlen)
11771 len = c_strlen (len, 1);
11772 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11773 return;
11775 else if (fcode == BUILT_IN_STRNCAT_CHK)
11777 tree src = CALL_EXPR_ARG (exp, 1);
11778 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11779 return;
11780 src = c_strlen (src, 1);
11781 if (! src || ! host_integerp (src, 1))
11783 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11784 exp, get_callee_fndecl (exp));
11785 return;
11787 else if (tree_int_cst_lt (src, size))
11788 return;
11790 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11791 return;
11793 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11794 exp, get_callee_fndecl (exp));
11797 /* Emit warning if a buffer overflow is detected at compile time
11798 in __sprintf_chk/__vsprintf_chk calls. */
11800 static void
11801 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11803 tree size, len, fmt;
11804 const char *fmt_str;
11805 int nargs = call_expr_nargs (exp);
11807 /* Verify the required arguments in the original call. */
11809 if (nargs < 4)
11810 return;
11811 size = CALL_EXPR_ARG (exp, 2);
11812 fmt = CALL_EXPR_ARG (exp, 3);
11814 if (! host_integerp (size, 1) || integer_all_onesp (size))
11815 return;
11817 /* Check whether the format is a literal string constant. */
11818 fmt_str = c_getstr (fmt);
11819 if (fmt_str == NULL)
11820 return;
11822 if (!init_target_chars ())
11823 return;
11825 /* If the format doesn't contain % args or %%, we know its size. */
11826 if (strchr (fmt_str, target_percent) == 0)
11827 len = build_int_cstu (size_type_node, strlen (fmt_str));
11828 /* If the format is "%s" and first ... argument is a string literal,
11829 we know it too. */
11830 else if (fcode == BUILT_IN_SPRINTF_CHK
11831 && strcmp (fmt_str, target_percent_s) == 0)
11833 tree arg;
11835 if (nargs < 5)
11836 return;
11837 arg = CALL_EXPR_ARG (exp, 4);
11838 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11839 return;
11841 len = c_strlen (arg, 1);
11842 if (!len || ! host_integerp (len, 1))
11843 return;
11845 else
11846 return;
11848 if (! tree_int_cst_lt (len, size))
11849 warning_at (tree_nonartificial_location (exp),
11850 0, "%Kcall to %D will always overflow destination buffer",
11851 exp, get_callee_fndecl (exp));
11854 /* Emit warning if a free is called with address of a variable. */
11856 static void
11857 maybe_emit_free_warning (tree exp)
11859 tree arg = CALL_EXPR_ARG (exp, 0);
11861 STRIP_NOPS (arg);
11862 if (TREE_CODE (arg) != ADDR_EXPR)
11863 return;
11865 arg = get_base_address (TREE_OPERAND (arg, 0));
11866 if (arg == NULL || INDIRECT_REF_P (arg))
11867 return;
11869 if (SSA_VAR_P (arg))
11870 warning_at (tree_nonartificial_location (exp),
11871 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11872 else
11873 warning_at (tree_nonartificial_location (exp),
11874 0, "%Kattempt to free a non-heap object", exp);
11877 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11878 if possible. */
11880 tree
11881 fold_builtin_object_size (tree ptr, tree ost)
11883 tree ret = NULL_TREE;
11884 int object_size_type;
11886 if (!validate_arg (ptr, POINTER_TYPE)
11887 || !validate_arg (ost, INTEGER_TYPE))
11888 return NULL_TREE;
11890 STRIP_NOPS (ost);
11892 if (TREE_CODE (ost) != INTEGER_CST
11893 || tree_int_cst_sgn (ost) < 0
11894 || compare_tree_int (ost, 3) > 0)
11895 return NULL_TREE;
11897 object_size_type = tree_low_cst (ost, 0);
11899 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11900 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11901 and (size_t) 0 for types 2 and 3. */
11902 if (TREE_SIDE_EFFECTS (ptr))
11903 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11905 if (TREE_CODE (ptr) == ADDR_EXPR)
11906 ret = build_int_cstu (size_type_node,
11907 compute_builtin_object_size (ptr, object_size_type));
11909 else if (TREE_CODE (ptr) == SSA_NAME)
11911 unsigned HOST_WIDE_INT bytes;
11913 /* If object size is not known yet, delay folding until
11914 later. Maybe subsequent passes will help determining
11915 it. */
11916 bytes = compute_builtin_object_size (ptr, object_size_type);
11917 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11918 ? -1 : 0))
11919 ret = build_int_cstu (size_type_node, bytes);
11922 if (ret)
11924 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11925 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11926 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11927 ret = NULL_TREE;
11930 return ret;
11933 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11934 DEST, SRC, LEN, and SIZE are the arguments to the call.
11935 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11936 code of the builtin. If MAXLEN is not NULL, it is maximum length
11937 passed as third argument. */
11939 tree
11940 fold_builtin_memory_chk (location_t loc, tree fndecl,
11941 tree dest, tree src, tree len, tree size,
11942 tree maxlen, bool ignore,
11943 enum built_in_function fcode)
11945 tree fn;
11947 if (!validate_arg (dest, POINTER_TYPE)
11948 || !validate_arg (src,
11949 (fcode == BUILT_IN_MEMSET_CHK
11950 ? INTEGER_TYPE : POINTER_TYPE))
11951 || !validate_arg (len, INTEGER_TYPE)
11952 || !validate_arg (size, INTEGER_TYPE))
11953 return NULL_TREE;
11955 /* If SRC and DEST are the same (and not volatile), return DEST
11956 (resp. DEST+LEN for __mempcpy_chk). */
11957 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11959 if (fcode != BUILT_IN_MEMPCPY_CHK)
11960 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11961 dest, len);
11962 else
11964 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11965 dest, len);
11966 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11970 if (! host_integerp (size, 1))
11971 return NULL_TREE;
11973 if (! integer_all_onesp (size))
11975 if (! host_integerp (len, 1))
11977 /* If LEN is not constant, try MAXLEN too.
11978 For MAXLEN only allow optimizing into non-_ocs function
11979 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11980 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11982 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11984 /* (void) __mempcpy_chk () can be optimized into
11985 (void) __memcpy_chk (). */
11986 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11987 if (!fn)
11988 return NULL_TREE;
11990 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11992 return NULL_TREE;
11995 else
11996 maxlen = len;
11998 if (tree_int_cst_lt (size, maxlen))
11999 return NULL_TREE;
12002 fn = NULL_TREE;
12003 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12004 mem{cpy,pcpy,move,set} is available. */
12005 switch (fcode)
12007 case BUILT_IN_MEMCPY_CHK:
12008 fn = built_in_decls[BUILT_IN_MEMCPY];
12009 break;
12010 case BUILT_IN_MEMPCPY_CHK:
12011 fn = built_in_decls[BUILT_IN_MEMPCPY];
12012 break;
12013 case BUILT_IN_MEMMOVE_CHK:
12014 fn = built_in_decls[BUILT_IN_MEMMOVE];
12015 break;
12016 case BUILT_IN_MEMSET_CHK:
12017 fn = built_in_decls[BUILT_IN_MEMSET];
12018 break;
12019 default:
12020 break;
12023 if (!fn)
12024 return NULL_TREE;
12026 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12029 /* Fold a call to the __st[rp]cpy_chk builtin.
12030 DEST, SRC, and SIZE are the arguments to the call.
12031 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12032 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12033 strings passed as second argument. */
12035 tree
12036 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12037 tree src, tree size,
12038 tree maxlen, bool ignore,
12039 enum built_in_function fcode)
12041 tree len, fn;
12043 if (!validate_arg (dest, POINTER_TYPE)
12044 || !validate_arg (src, POINTER_TYPE)
12045 || !validate_arg (size, INTEGER_TYPE))
12046 return NULL_TREE;
12048 /* If SRC and DEST are the same (and not volatile), return DEST. */
12049 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12050 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12052 if (! host_integerp (size, 1))
12053 return NULL_TREE;
12055 if (! integer_all_onesp (size))
12057 len = c_strlen (src, 1);
12058 if (! len || ! host_integerp (len, 1))
12060 /* If LEN is not constant, try MAXLEN too.
12061 For MAXLEN only allow optimizing into non-_ocs function
12062 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12063 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12065 if (fcode == BUILT_IN_STPCPY_CHK)
12067 if (! ignore)
12068 return NULL_TREE;
12070 /* If return value of __stpcpy_chk is ignored,
12071 optimize into __strcpy_chk. */
12072 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12073 if (!fn)
12074 return NULL_TREE;
12076 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12079 if (! len || TREE_SIDE_EFFECTS (len))
12080 return NULL_TREE;
12082 /* If c_strlen returned something, but not a constant,
12083 transform __strcpy_chk into __memcpy_chk. */
12084 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12085 if (!fn)
12086 return NULL_TREE;
12088 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12089 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12090 build_call_expr_loc (loc, fn, 4,
12091 dest, src, len, size));
12094 else
12095 maxlen = len;
12097 if (! tree_int_cst_lt (maxlen, size))
12098 return NULL_TREE;
12101 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12102 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12103 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12104 if (!fn)
12105 return NULL_TREE;
12107 return build_call_expr_loc (loc, fn, 2, dest, src);
12110 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12111 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12112 length passed as third argument. */
12114 tree
12115 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12116 tree len, tree size, tree maxlen)
12118 tree fn;
12120 if (!validate_arg (dest, POINTER_TYPE)
12121 || !validate_arg (src, POINTER_TYPE)
12122 || !validate_arg (len, INTEGER_TYPE)
12123 || !validate_arg (size, INTEGER_TYPE))
12124 return NULL_TREE;
12126 if (! host_integerp (size, 1))
12127 return NULL_TREE;
12129 if (! integer_all_onesp (size))
12131 if (! host_integerp (len, 1))
12133 /* If LEN is not constant, try MAXLEN too.
12134 For MAXLEN only allow optimizing into non-_ocs function
12135 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12136 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12137 return NULL_TREE;
12139 else
12140 maxlen = len;
12142 if (tree_int_cst_lt (size, maxlen))
12143 return NULL_TREE;
12146 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12147 fn = built_in_decls[BUILT_IN_STRNCPY];
12148 if (!fn)
12149 return NULL_TREE;
12151 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12154 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12155 are the arguments to the call. */
12157 static tree
12158 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12159 tree src, tree size)
12161 tree fn;
12162 const char *p;
12164 if (!validate_arg (dest, POINTER_TYPE)
12165 || !validate_arg (src, POINTER_TYPE)
12166 || !validate_arg (size, INTEGER_TYPE))
12167 return NULL_TREE;
12169 p = c_getstr (src);
12170 /* If the SRC parameter is "", return DEST. */
12171 if (p && *p == '\0')
12172 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12174 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12175 return NULL_TREE;
12177 /* If __builtin_strcat_chk is used, assume strcat is available. */
12178 fn = built_in_decls[BUILT_IN_STRCAT];
12179 if (!fn)
12180 return NULL_TREE;
12182 return build_call_expr_loc (loc, fn, 2, dest, src);
12185 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12186 LEN, and SIZE. */
12188 static tree
12189 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12190 tree dest, tree src, tree len, tree size)
12192 tree fn;
12193 const char *p;
12195 if (!validate_arg (dest, POINTER_TYPE)
12196 || !validate_arg (src, POINTER_TYPE)
12197 || !validate_arg (size, INTEGER_TYPE)
12198 || !validate_arg (size, INTEGER_TYPE))
12199 return NULL_TREE;
12201 p = c_getstr (src);
12202 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12203 if (p && *p == '\0')
12204 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12205 else if (integer_zerop (len))
12206 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12208 if (! host_integerp (size, 1))
12209 return NULL_TREE;
12211 if (! integer_all_onesp (size))
12213 tree src_len = c_strlen (src, 1);
12214 if (src_len
12215 && host_integerp (src_len, 1)
12216 && host_integerp (len, 1)
12217 && ! tree_int_cst_lt (len, src_len))
12219 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12220 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12221 if (!fn)
12222 return NULL_TREE;
12224 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12226 return NULL_TREE;
12229 /* If __builtin_strncat_chk is used, assume strncat is available. */
12230 fn = built_in_decls[BUILT_IN_STRNCAT];
12231 if (!fn)
12232 return NULL_TREE;
12234 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12237 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12238 a normal call should be emitted rather than expanding the function
12239 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12241 static tree
12242 fold_builtin_sprintf_chk (location_t loc, tree exp,
12243 enum built_in_function fcode)
12245 tree dest, size, len, fn, fmt, flag;
12246 const char *fmt_str;
12247 int nargs = call_expr_nargs (exp);
12249 /* Verify the required arguments in the original call. */
12250 if (nargs < 4)
12251 return NULL_TREE;
12252 dest = CALL_EXPR_ARG (exp, 0);
12253 if (!validate_arg (dest, POINTER_TYPE))
12254 return NULL_TREE;
12255 flag = CALL_EXPR_ARG (exp, 1);
12256 if (!validate_arg (flag, INTEGER_TYPE))
12257 return NULL_TREE;
12258 size = CALL_EXPR_ARG (exp, 2);
12259 if (!validate_arg (size, INTEGER_TYPE))
12260 return NULL_TREE;
12261 fmt = CALL_EXPR_ARG (exp, 3);
12262 if (!validate_arg (fmt, POINTER_TYPE))
12263 return NULL_TREE;
12265 if (! host_integerp (size, 1))
12266 return NULL_TREE;
12268 len = NULL_TREE;
12270 if (!init_target_chars ())
12271 return NULL_TREE;
12273 /* Check whether the format is a literal string constant. */
12274 fmt_str = c_getstr (fmt);
12275 if (fmt_str != NULL)
12277 /* If the format doesn't contain % args or %%, we know the size. */
12278 if (strchr (fmt_str, target_percent) == 0)
12280 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12281 len = build_int_cstu (size_type_node, strlen (fmt_str));
12283 /* If the format is "%s" and first ... argument is a string literal,
12284 we know the size too. */
12285 else if (fcode == BUILT_IN_SPRINTF_CHK
12286 && strcmp (fmt_str, target_percent_s) == 0)
12288 tree arg;
12290 if (nargs == 5)
12292 arg = CALL_EXPR_ARG (exp, 4);
12293 if (validate_arg (arg, POINTER_TYPE))
12295 len = c_strlen (arg, 1);
12296 if (! len || ! host_integerp (len, 1))
12297 len = NULL_TREE;
12303 if (! integer_all_onesp (size))
12305 if (! len || ! tree_int_cst_lt (len, size))
12306 return NULL_TREE;
12309 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12310 or if format doesn't contain % chars or is "%s". */
12311 if (! integer_zerop (flag))
12313 if (fmt_str == NULL)
12314 return NULL_TREE;
12315 if (strchr (fmt_str, target_percent) != NULL
12316 && strcmp (fmt_str, target_percent_s))
12317 return NULL_TREE;
12320 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12321 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12322 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12323 if (!fn)
12324 return NULL_TREE;
12326 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
/* Fold a call EXP to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means "unknown object size"; in that case the
     checking call is always safe to drop.  Otherwise prove LEN (or
     MAXLEN) fits in SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Rebuild the call dropping the flag and size arguments (skip the
     first 5 args, re-supply dest, len and fmt).  */
  return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: the
     replacements below do not return the character count printf
     would.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Case 1: the format is exactly "%s", or contains no '%' at all.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* "%s" needs a real string argument; the va_list variants
	     cannot supply one here.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  if (!call)
    return NULL_TREE;

  /* Match the original call's return type; the value is unused (see
     the IGNORE check above) so any conversion is safe.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation: the
     replacements below do not return fprintf's character count.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  if (!call)
    return NULL_TREE;
  /* Match the original call's return type; the value is unused (see
     the IGNORE check above) so any conversion is safe.  */
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12641 /* Initialize format string characters in the target charset. */
12643 static bool
12644 init_target_chars (void)
12646 static bool init;
12647 if (!init)
12649 target_newline = lang_hooks.to_target_charset ('\n');
12650 target_percent = lang_hooks.to_target_charset ('%');
12651 target_c = lang_hooks.to_target_charset ('c');
12652 target_s = lang_hooks.to_target_charset ('s');
12653 if (target_newline == 0 || target_percent == 0 || target_c == 0
12654 || target_s == 0)
12655 return false;
12657 target_percent_c[0] = target_percent;
12658 target_percent_c[1] = target_c;
12659 target_percent_c[2] = '\0';
12661 target_percent_s[0] = target_percent;
12662 target_percent_s[1] = target_s;
12663 target_percent_s[2] = '\0';
12665 target_percent_s_newline[0] = target_percent;
12666 target_percent_s_newline[1] = target_s;
12667 target_percent_s_newline[2] = target_newline;
12668 target_percent_s_newline[3] = '\0';
12670 init = true;
12672 return true;
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its element type governs the
	 conversions below.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Reject NaN/Inf and any value outside the caller-supplied
	 domain bounds.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
12806 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12807 FUNC on it and return the resulting value as a tree with type TYPE.
12808 The mpfr precision is set to the precision of TYPE. We assume that
12809 function FUNC returns zero if the result could be calculated
12810 exactly within the requested precision. */
12812 static tree
12813 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12814 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12816 tree result = NULL_TREE;
12818 STRIP_NOPS (arg1);
12819 STRIP_NOPS (arg2);
12821 /* To proceed, MPFR must exactly represent the target floating point
12822 format, which only happens when the target base equals two. */
12823 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12824 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12825 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12827 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12828 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12830 if (real_isfinite (ra1) && real_isfinite (ra2))
12832 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12833 const int prec = fmt->p;
12834 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12835 int inexact;
12836 mpfr_t m1, m2;
12838 mpfr_inits2 (prec, m1, m2, NULL);
12839 mpfr_from_real (m1, ra1, GMP_RNDN);
12840 mpfr_from_real (m2, ra2, GMP_RNDN);
12841 mpfr_clear_flags ();
12842 inexact = func (m1, m1, m2, rnd);
12843 result = do_mpfr_ckconv (m1, type, inexact);
12844 mpfr_clears (m1, m2, NULL);
12848 return result;
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  All
     three operands must be valid real constants.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      /* NaN/Inf operands are left for the runtime library.  */
      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute sine and cosine in one mpfr call; INEXACT covers
	     both results.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }

  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  If MIN is non-NULL, ARG2 must additionally be
   >= *MIN (or > *MIN when INCLUSIVE is false).  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* The order must fit in a host 'long' since that is what the
	 mpfr interface takes.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }

  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer (poles of lgamma).  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  /* DO_NONFINITE doubles as FORCE_CONVERT to bypass the
	     finiteness checks on the result.  */
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13259 /* FIXME tuples.
13260 The functions below provide an alternate interface for folding
13261 builtin function calls presented as GIMPLE_CALL statements rather
13262 than as CALL_EXPRs. The folded result is still expressed as a
13263 tree. There is too much code duplication in the handling of
13264 varargs functions, and a more intrusive re-factoring would permit
13265 better sharing of code between the tree and statement-based
13266 versions of these functions. */
13268 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13269 along with N new arguments specified as the "..." parameters. SKIP
13270 is the number of arguments in STMT to be omitted. This function is used
13271 to do varargs-to-varargs transformations. */
13273 static tree
13274 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13276 int oldnargs = gimple_call_num_args (stmt);
13277 int nargs = oldnargs - skip + n;
13278 tree fntype = TREE_TYPE (fndecl);
13279 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13280 tree *buffer;
13281 int i, j;
13282 va_list ap;
13283 location_t loc = gimple_location (stmt);
13285 buffer = XALLOCAVEC (tree, nargs);
13286 va_start (ap, n);
13287 for (i = 0; i < n; i++)
13288 buffer[i] = va_arg (ap, tree);
13289 va_end (ap);
13290 for (j = skip; j < oldnargs; j++, i++)
13291 buffer[i] = gimple_call_arg (stmt, j);
13293 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static tree
gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant to fold the check.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* LEN, when determinable, is the number of bytes the call will write
     (not counting the terminating NUL).  */
  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  /* Discard a length that is unknown or not a constant
		     that fits in an unsigned host integer.  */
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* An all-ones SIZE means the object size is unknown; otherwise require
     the known output length to be strictly smaller than SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments (positions 1 and 2) and rebuild the
     call as plain {,v}sprintf (dest, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
}
/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

tree
gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
                                  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return NULL_TREE;
  dest = gimple_call_arg (stmt, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = gimple_call_arg (stmt, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = gimple_call_arg (stmt, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = gimple_call_arg (stmt, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = gimple_call_arg (stmt, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a compile-time constant to fold the check.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* An all-ones SIZE means the object size is unknown; otherwise make sure
     the bound passed to snprintf cannot exceed the object size.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments (positions 2 and 3) and rebuild the
     call as plain {,v}snprintf (dest, len, fmt, ...).  */
  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
}
13464 /* Builtins with folding operations that operate on "..." arguments
13465 need special handling; we need to store the arguments in a convenient
13466 data structure before attempting any folding. Fortunately there are
13467 only a few builtins that fall into this category. FNDECL is the
13468 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13469 result of the function call is ignored. */
13471 static tree
13472 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13473 bool ignore ATTRIBUTE_UNUSED)
13475 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13476 tree ret = NULL_TREE;
13478 switch (fcode)
13480 case BUILT_IN_SPRINTF_CHK:
13481 case BUILT_IN_VSPRINTF_CHK:
13482 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13483 break;
13485 case BUILT_IN_SNPRINTF_CHK:
13486 case BUILT_IN_VSNPRINTF_CHK:
13487 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13489 default:
13490 break;
13492 if (ret)
13494 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13495 TREE_NO_WARNING (ret) = 1;
13496 return ret;
13498 return NULL_TREE;
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gimple stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only fold direct calls to builtins, and never calls that use
     __builtin_va_arg_pack.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target hook,
	     which still takes its arguments as a tree list.  */
	  tree arglist = NULL_TREE;
	  int i;
	  for (i = nargs - 1; i >= 0; i--)
	    arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
	  return targetm.fold_builtin (fndecl, arglist, ignore);
	}
      else
	{
	  /* Fixed-arity builtins go through fold_builtin_n with the
	     arguments copied into a flat array.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree args[MAX_ARGS_TO_FOLD_BUILTIN];
	      int i;
	      for (i = 0; i < nargs; i++)
		args[i] = gimple_call_arg (stmt, i);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  /* Fall back to the special handling for "..." builtins.  */
	  if (!ret)
	    ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the no-warning NOP_EXPR wrapper added
		     by gimple_fold_builtin_varargs.  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
13564 /* Look up the function in built_in_decls that corresponds to DECL
13565 and set ASMSPEC as its user assembler name. DECL must be a
13566 function decl that declares a builtin. */
13568 void
13569 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13571 tree builtin;
13572 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13573 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13574 && asmspec != 0);
13576 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13577 set_user_assembler_name (builtin, asmspec);
13578 switch (DECL_FUNCTION_CODE (decl))
13580 case BUILT_IN_MEMCPY:
13581 init_block_move_fn (asmspec);
13582 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13583 break;
13584 case BUILT_IN_MEMSET:
13585 init_block_clear_fn (asmspec);
13586 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13587 break;
13588 case BUILT_IN_MEMMOVE:
13589 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13590 break;
13591 case BUILT_IN_MEMCMP:
13592 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13593 break;
13594 case BUILT_IN_ABORT:
13595 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13596 break;
13597 case BUILT_IN_FFS:
13598 if (INT_TYPE_SIZE < BITS_PER_WORD)
13600 set_user_assembler_libfunc ("ffs", asmspec);
13601 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13602 MODE_INT, 0), "ffs");
13604 break;
13605 default:
13606 break;