* call.c (type_decays_to): Call cv_unqualified for non-class type.
[official-gcc.git] / gcc / builtins.c
blob7787f695ac82894ff4ec46401e44801f3fb5fe78
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* A name is "builtin-like" when it carries one of the reserved
     implementation prefixes.  */
  static const char *const prefixes[] = { "__builtin_", "__sync_" };
  size_t k;

  for (k = 0; k < sizeof (prefixes) / sizeof (prefixes[0]); k++)
    if (strncmp (name, prefixes[k], strlen (prefixes[k])) == 0)
      return true;

  return false;
}
241 /* Return true if DECL is a function symbol representing a built-in. */
243 bool
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
254 static bool
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what; ALIGN is the initial
   guessed alignment, e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the alignment guaranteed by the access path itself
     (bit position and variable offsets); it only ever decreases.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Peel off COMPONENT_REF/ARRAY_REF/etc. to reach the base object,
	 collecting the constant bit position and any variable offset.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* bitpos & -bitpos isolates the lowest set bit, i.e. the largest
	 power of two dividing the bit position.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a (possibly PLUS_EXPR-chained) variable offset, reducing
	 INNER by whatever alignment each term can still guarantee.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unanalyzable offset: assume only byte alignment and stop.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* A CONST_DECL stands for its initializer; look at that instead.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
346 bool
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* Without trustworthy alignment data, claim nothing.  */
  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Strip conversions and pointer arithmetic, tightening or loosening
     ALIGN as each wrapper dictates, until we hit something terminal.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant offset is a multiple of
	     its byte equivalent.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only when both arms agree
     (and, unless ONLY_VALUE, the condition has no side effects).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, provided e1 can be dropped.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the largest valid index; the array size includes the
     terminating NUL appended by build_string.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
521 static const char *
522 c_getstr (tree src)
524 tree offset_node;
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* C holds up to two host words worth of target bytes; C[0] is the
     low host word, C[1] the high one.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  /* CH doubles as the current byte value and as a "still inside the
     string" flag: once a NUL is read it stays 0, so the remaining
     target bytes are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* J is the destination byte index, adjusted for word and
	 byte endianness of the target.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      /* Convert the byte index into a bit position.  */
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
574 P. */
576 static int
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 return 1;
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
589 hostval = val;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
593 if (val != hostval)
594 return 1;
596 *p = hostval;
597 return 0;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
604 static tree
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
612 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The braced block runs unconditionally on
     targets without a nonlocal_goto pattern.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's dedicated receiver pattern; fall back to the
     nonlocal-goto receiver; otherwise emit nothing.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: word 0 is
	 the frame pointer, word 1 the receiver label, the rest the
	 stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area (a two-word block holding the saved
   frame pointer followed by the saved stack pointer).  Always returns
   const0_rtx; all the work is in the emitted insns.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  /* Saved FP is the first word of the save area, saved SP the second.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that all memory and the frame may be
	 modified across this jump.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  Stop at any intervening CALL: the jump we
     want cannot be on the far side of a call.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1001 stack pointer. */
1003 static void
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1007 rtx stack_save;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013 #endif
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 #endif
1018 stack_save
1019 = gen_rtx_MEM (sa_mode,
1020 memory_address
1021 (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1024 #ifdef HAVE_setjmp
1025 if (HAVE_setjmp)
1026 emit_insn (gen_setjmp ());
1027 #endif
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1034 effects. */
1036 static void
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1040 int nargs;
1041 rtx op0, op1, op2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1044 return;
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050 locality). */
1051 nargs = call_expr_nargs (exp);
1052 if (nargs > 1)
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1054 else
1055 arg1 = integer_zero_node;
1056 if (nargs > 2)
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1058 else
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1075 " using zero");
1076 op1 = const0_rtx;
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090 op2 = const0_rtx;
1093 #ifdef HAVE_prefetch
1094 if (HAVE_prefetch)
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097 (op0,
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1106 #endif
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1111 emit_insn (op0);
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original (unstripped) expression to get the address.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Peel a constant positive offset off &object + off so the attributes
     come from OBJECT itself; OFF is reapplied to the MEM below.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  /* Strip wrappers to reach the innermost COMPONENT_REF that
	     the MEM_EXPR was derived from.  */
	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs, stopping at the
	     first field proven to contain the whole [offset, offset+length)
	     access; everything inside it is then dropped from MEM_EXPR.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      /* Translate the offset into the enclosing record; if the
		 field offset is not a host integer, give up tracking.  */
	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      /* Stringops may alias with anything and may touch multiple
	 elements, so clear the alias set and the recorded size.  */
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1269 static int
1270 apply_args_size (void)
1272 static int size = -1;
1273 int align;
1274 unsigned int regno;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1278 if (size < 0)
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1301 else
1303 apply_args_mode[regno] = VOIDmode;
1306 return size;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1312 static int
1313 apply_result_size (void)
1315 static int size = -1;
1316 int align, regno;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1322 size = 0;
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1337 else
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1344 #endif
1346 return size;
1349 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1350 /* Create a vector describing the result block RESULT. If SAVEP is true,
1351 the result block is used to save the values; otherwise it is used to
1352 restore the values. */
1354 static rtx
1355 result_vector (int savep, rtx result)
1357 int regno, size, align, nelts;
1358 enum machine_mode mode;
1359 rtx reg, mem;
1360 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1362 size = nelts = 0;
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if ((mode = apply_result_mode[regno]) != VOIDmode)
1366 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1367 if (size % align != 0)
1368 size = CEIL (size, align) * align;
1369 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1370 mem = adjust_address (result, mode, size);
1371 savevec[nelts++] = (savep
1372 ? gen_rtx_SET (VOIDmode, mem, reg)
1373 : gen_rtx_SET (VOIDmode, reg, mem));
1374 size += GET_MODE_SIZE (mode);
1376 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1378 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1383 static rtx
1384 expand_builtin_apply_args_1 (void)
1386 rtx registers, tem;
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1422 NULL_RTX);
1423 #endif
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1446 saved. */
1448 static rtx
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1459 rtx temp;
1460 rtx seq;
1462 start_sequence ();
1463 temp = expand_builtin_apply_args_1 ();
1464 seq = get_insns ();
1465 end_sequence ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1474 that pseudo. */
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1479 else
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1482 return temp;
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee's address, ARGUMENTS the block built by
   __builtin_apply_args, ARGSIZE the number of argument bytes to copy.
   Returns the address (in ptr_mode) of a block holding the callee's
   return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  (Called for its side effect of
     ensuring apply_args_mode[] is initialized.)  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  RESULT is the address of the block that
   expand_builtin_apply filled with the callee's return registers; reload
   those registers and jump to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect of initializing apply_result_mode[].  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE of each restored register so the moves are
	   not deleted as dead before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  Expands to three
   case labels plus assignments to the local variables fcode, fcodef
   and fcodel of the enclosing switch.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r/lgammaf_r/lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Map FN (any of the double/float/long-double variants) onto the
     triple of codes for all three precisions; the CASE_MATHFN macros
     set fcode/fcodef/fcodel as a side effect.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  /* Pick the variant matching TYPE's main variant.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  (x == x is false only
     for NaN, so the branch to LAB skips the errno store.)  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1921 static rtx
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1925 rtx op0, insns;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1929 tree arg;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1932 return NULL_RTX;
1934 arg = CALL_EXPR_ARG (exp, 0);
/* Select the optab for this function and record whether the call can
   set errno (in which case a NaN check must be emitted below).  */
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
/* sqrt only sets EDOM for negative arguments, so the check can be
   omitted when the argument is provably non-negative.  */
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1941 break;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1980 break;
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1986 default:
1987 gcc_unreachable ();
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is only required if -fno-math-errno is not in effect
   and the mode has NaNs with which to signal the domain error.  */
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1994 errno_set = false;
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more than once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2008 start_sequence ();
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2014 if (target != 0)
2016 if (errno_set)
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2021 end_sequence ();
2022 emit_insn (insns);
2023 return target;
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2029 end_sequence ();
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2040 operands. */
2042 static rtx
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2049 tree arg0, arg1;
2050 enum machine_mode mode;
2051 bool errno_set = true;
/* scalbn/scalbln/ldexp take an integer second operand; everything else
   handled here takes two floating-point operands.  */
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
/* Fall through.  */
2059 default:
2060 break;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2064 return NULL_RTX;
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
/* scalb as an exponent-manipulation insn is only valid for radix-2
   floating-point formats.  */
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2077 return 0;
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 return 0;
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2091 default:
2092 gcc_unreachable ();
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2100 return NULL_RTX;
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 errno_set = false;
2107 /* Always stabilize the argument list. */
/* The arguments may be expanded again by the library fallback below,
   so wrap them in SAVE_EXPRs to avoid duplicating side effects.  */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2114 start_sequence ();
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2124 if (target == 0)
2126 end_sequence ();
2127 return expand_call (exp, target, target == const0_rtx);
2130 if (errno_set)
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2138 return target;
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2146 operands. */
2148 static rtx
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2152 rtx op0, insns;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2155 tree arg;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2158 return NULL_RTX;
2160 arg = CALL_EXPR_ARG (exp, 0);
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2167 default:
2168 gcc_unreachable ();
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2183 default:
2184 gcc_unreachable ();
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more than once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2199 start_sequence ();
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2205 int result;
/* The sincos insn produces two values; only one is wanted here, so
   request TARGET in the appropriate output slot and discard (0) the
   other: sin uses the second slot, cos the first.  */
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2211 break;
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2214 break;
2215 default:
2216 gcc_unreachable ();
2218 gcc_assert (result);
2220 else
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2225 if (target != 0)
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2229 end_sequence ();
2230 emit_insn (insns);
2231 return target;
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2237 end_sequence ();
2240 target = expand_call (exp, target, target == const0_rtx);
2242 return target;
2245 /* Given an interclass math builtin decl FNDECL and its argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
/* builtin_optab stays 0, so CODE_FOR_nothing is returned below.  */
2272 break;
2273 default:
2274 gcc_unreachable ();
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
/* Note the mode is taken from ARG, not from the (integer) result.  */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2284 if (builtin_optab)
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2291 isnan, etc).
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2297 static rtx
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2301 rtx op0;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2304 tree arg;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 return NULL_RTX;
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 rtx last = get_last_insn ();
/* Remember the original argument so the call tree can be restored if
   the insn expansion fails below.  */
2316 tree orig_arg = arg;
2317 /* Make a suitable register to place result in. */
2318 if (!target
2319 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2320 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2322 gcc_assert (insn_data[icode].operand[0].predicate
2323 (target, GET_MODE (target)));
2325 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2326 need to expand the argument again. This way, we will not perform
2327 side-effects more than once. */
2328 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2330 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2332 if (mode != GET_MODE (op0))
2333 op0 = convert_to_mode (mode, op0, 0);
2335 /* Compute into TARGET.
2336 Set TARGET to wherever the result comes back. */
2337 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2338 return target;
/* Expansion failed: discard any partially emitted insns and undo the
   SAVE_EXPR wrapping before falling back to a normal call.  */
2339 delete_insns_since (last);
2340 CALL_EXPR_ARG (exp, 0) = orig_arg;
2343 return NULL_RTX;
2346 /* Expand a call to the builtin sincos math function.
2347 Return NULL_RTX if a normal call should be emitted rather than expanding the
2348 function in-line. EXP is the expression that is a call to the builtin
2349 function. */
2351 static rtx
2352 expand_builtin_sincos (tree exp)
2354 rtx op0, op1, op2, target1, target2;
2355 enum machine_mode mode;
2356 tree arg, sinp, cosp;
2357 int result;
2358 location_t loc = EXPR_LOCATION (exp);
2360 if (!validate_arglist (exp, REAL_TYPE,
2361 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2362 return NULL_RTX;
/* sincos (x, &s, &c): ARG is the angle, SINP and COSP the output
   pointers.  */
2364 arg = CALL_EXPR_ARG (exp, 0);
2365 sinp = CALL_EXPR_ARG (exp, 1);
2366 cosp = CALL_EXPR_ARG (exp, 2);
2368 /* Make a suitable register to place result in. */
2369 mode = TYPE_MODE (TREE_TYPE (arg));
2371 /* Check if sincos insn is available, otherwise emit the call. */
2372 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2373 return NULL_RTX;
2375 target1 = gen_reg_rtx (mode);
2376 target2 = gen_reg_rtx (mode);
2378 op0 = expand_normal (arg);
/* Expand *sinp and *cosp as lvalue locations for the results.  */
2379 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2380 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2382 /* Compute into target1 and target2.
2383 Set TARGET to wherever the result comes back. */
2384 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2385 gcc_assert (result);
2387 /* Move target1 and target2 to the memory locations indicated
2388 by op1 and op2. */
2389 emit_move_insn (op1, target1);
2390 emit_move_insn (op2, target2);
/* The library sincos returns void; const0_rtx signals success.  */
2392 return const0_rtx;
2395 /* Expand a call to the internal cexpi builtin to the sincos math function.
2396 EXP is the expression that is a call to the builtin function; if convenient,
2397 the result should be placed in TARGET. SUBTARGET may be used as the target
2398 for computing one of EXP's operands. */
2400 static rtx
2401 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2403 tree fndecl = get_callee_fndecl (exp);
2404 tree arg, type;
2405 enum machine_mode mode;
2406 rtx op0, op1, op2;
2407 location_t loc = EXPR_LOCATION (exp);
2409 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2410 return NULL_RTX;
2412 arg = CALL_EXPR_ARG (exp, 0);
2413 type = TREE_TYPE (arg);
2414 mode = TYPE_MODE (TREE_TYPE (arg));
2416 /* Try expanding via a sincos optab, fall back to emitting a libcall
2417 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2418 is only generated from sincos, cexp or if we have either of them. */
2419 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2421 op1 = gen_reg_rtx (mode);
2422 op2 = gen_reg_rtx (mode);
2424 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2426 /* Compute into op1 and op2. */
/* op2 receives the first output and op1 the second; they are combined
   into a complex value at the end of the function.  */
2427 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2429 else if (TARGET_HAS_SINCOS)
2431 tree call, fn = NULL_TREE;
2432 tree top1, top2;
2433 rtx op1a, op2a;
/* Pick the sincos variant matching the cexpi precision.  */
2435 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2436 fn = built_in_decls[BUILT_IN_SINCOSF];
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2438 fn = built_in_decls[BUILT_IN_SINCOS];
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2440 fn = built_in_decls[BUILT_IN_SINCOSL];
2441 else
2442 gcc_unreachable ();
/* Allocate two stack temporaries and build tree-level pointers to
   them to pass as the sin/cos output arguments.  */
2444 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2445 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2447 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2448 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2449 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2451 /* Make sure not to fold the sincos call again. */
2452 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2453 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2454 call, 3, arg, top1, top2));
2456 else
2458 tree call, fn = NULL_TREE, narg;
2459 tree ctype = build_complex_type (type);
2461 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2462 fn = built_in_decls[BUILT_IN_CEXPF];
2463 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2464 fn = built_in_decls[BUILT_IN_CEXP];
2465 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2466 fn = built_in_decls[BUILT_IN_CEXPL];
2467 else
2468 gcc_unreachable ();
2470 /* If we don't have a decl for cexp create one. This is the
2471 friendliest fallback if the user calls __builtin_cexpi
2472 without full target C99 function support. */
2473 if (fn == NULL_TREE)
2475 tree fntype;
2476 const char *name = NULL;
2478 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2479 name = "cexpf";
2480 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2481 name = "cexp";
2482 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2483 name = "cexpl";
2485 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2486 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + x*i): build the complex argument 0 + arg*i.  */
2489 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2490 build_real (type, dconst0), arg);
2492 /* Make sure not to fold the cexp call again. */
2493 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2494 return expand_expr (build_call_nary (ctype, call, 1, narg),
2495 target, VOIDmode, EXPAND_NORMAL);
2498 /* Now build the proper return type. */
2499 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2500 make_tree (TREE_TYPE (arg), op2),
2501 make_tree (TREE_TYPE (arg), op1)),
2502 target, VOIDmode, EXPAND_NORMAL);
2505 /* Conveniently construct a function call expression. FNDECL names the
2506 function to be called, N is the number of arguments, and the "..."
2507 parameters are the argument expressions. Unlike build_call_expr
2508 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2510 static tree
2511 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2513 va_list ap;
2514 tree fntype = TREE_TYPE (fndecl);
2515 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2517 va_start (ap, n);
2518 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2519 va_end (ap);
2520 SET_EXPR_LOCATION (fn, loc);
2521 return fn;
/* Convenience wrapper for callers without a meaningful location.  */
2523 #define build_call_nofold(...) \
2524 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2526 /* Expand a call to one of the builtin rounding functions gcc defines
2527 as an extension (lfloor and lceil). As these are gcc extensions we
2528 do not need to worry about setting errno to EDOM.
2529 If expanding via optab fails, lower expression to (int)(floor(x)).
2530 EXP is the expression that is a call to the builtin function;
2531 if convenient, the result should be placed in TARGET. */
2533 static rtx
2534 expand_builtin_int_roundingfn (tree exp, rtx target)
2536 convert_optab builtin_optab;
2537 rtx op0, insns, tmp;
2538 tree fndecl = get_callee_fndecl (exp);
2539 enum built_in_function fallback_fn;
2540 tree fallback_fndecl;
2541 enum machine_mode mode;
2542 tree arg;
2544 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2545 gcc_unreachable ();
2547 arg = CALL_EXPR_ARG (exp, 0);
2549 switch (DECL_FUNCTION_CODE (fndecl))
2551 CASE_FLT_FN (BUILT_IN_LCEIL):
2552 CASE_FLT_FN (BUILT_IN_LLCEIL):
2553 builtin_optab = lceil_optab;
2554 fallback_fn = BUILT_IN_CEIL;
2555 break;
2557 CASE_FLT_FN (BUILT_IN_LFLOOR):
2558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2559 builtin_optab = lfloor_optab;
2560 fallback_fn = BUILT_IN_FLOOR;
2561 break;
2563 default:
2564 gcc_unreachable ();
2567 /* Make a suitable register to place result in. */
2568 mode = TYPE_MODE (TREE_TYPE (exp));
2570 target = gen_reg_rtx (mode);
2572 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2573 need to expand the argument again. This way, we will not perform
2574 side-effects more than once. */
2575 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2577 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2579 start_sequence ();
2581 /* Compute into TARGET. */
2582 if (expand_sfix_optab (target, op0, builtin_optab))
2584 /* Output the entire sequence. */
2585 insns = get_insns ();
2586 end_sequence ();
2587 emit_insn (insns);
2588 return target;
2591 /* If we were unable to expand via the builtin, stop the sequence
2592 (without outputting the insns). */
2593 end_sequence ();
2595 /* Fall back to floating point rounding optab. */
2596 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2598 /* For non-C99 targets we may end up without a fallback fndecl here
2599 if the user called __builtin_lfloor directly. In this case emit
2600 a call to the floor/ceil variants nevertheless. This should result
2601 in the best user experience for not full C99 targets. */
2602 if (fallback_fndecl == NULL_TREE)
2604 tree fntype;
2605 const char *name = NULL;
/* Map the integer rounding built-in to the name of the corresponding
   floating-point library routine for this precision.  */
2607 switch (DECL_FUNCTION_CODE (fndecl))
2609 case BUILT_IN_LCEIL:
2610 case BUILT_IN_LLCEIL:
2611 name = "ceil";
2612 break;
2613 case BUILT_IN_LCEILF:
2614 case BUILT_IN_LLCEILF:
2615 name = "ceilf";
2616 break;
2617 case BUILT_IN_LCEILL:
2618 case BUILT_IN_LLCEILL:
2619 name = "ceill";
2620 break;
2621 case BUILT_IN_LFLOOR:
2622 case BUILT_IN_LLFLOOR:
2623 name = "floor";
2624 break;
2625 case BUILT_IN_LFLOORF:
2626 case BUILT_IN_LLFLOORF:
2627 name = "floorf";
2628 break;
2629 case BUILT_IN_LFLOORL:
2630 case BUILT_IN_LLFLOORL:
2631 name = "floorl";
2632 break;
2633 default:
2634 gcc_unreachable ();
2637 fntype = build_function_type_list (TREE_TYPE (arg),
2638 TREE_TYPE (arg), NULL_TREE);
2639 fallback_fndecl = build_fn_decl (name, fntype);
/* Replace the original call with floor/ceil and expand that.  */
2642 exp = build_call_nofold (fallback_fndecl, 1, arg);
2644 tmp = expand_normal (exp);
2646 /* Truncate the result of floating point optab to integer
2647 via expand_fix (). */
2648 target = gen_reg_rtx (mode);
2649 expand_fix (target, tmp, 0);
2651 return target;
2654 /* Expand a call to one of the builtin math functions doing integer
2655 conversion (lrint).
2656 Return 0 if a normal call should be emitted rather than expanding the
2657 function in-line. EXP is the expression that is a call to the builtin
2658 function; if convenient, the result should be placed in TARGET. */
2660 static rtx
2661 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2663 convert_optab builtin_optab;
2664 rtx op0, insns;
2665 tree fndecl = get_callee_fndecl (exp);
2666 tree arg;
2667 enum machine_mode mode;
2669 /* There's no easy way to detect the case we need to set EDOM. */
2670 if (flag_errno_math)
2671 return NULL_RTX;
2673 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2674 gcc_unreachable ();
2676 arg = CALL_EXPR_ARG (exp, 0);
2678 switch (DECL_FUNCTION_CODE (fndecl))
2680 CASE_FLT_FN (BUILT_IN_LRINT):
2681 CASE_FLT_FN (BUILT_IN_LLRINT):
2682 builtin_optab = lrint_optab; break;
2683 CASE_FLT_FN (BUILT_IN_LROUND):
2684 CASE_FLT_FN (BUILT_IN_LLROUND):
2685 builtin_optab = lround_optab; break;
2686 default:
2687 gcc_unreachable ();
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more than once. */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 start_sequence ();
2704 if (expand_sfix_optab (target, op0, builtin_optab))
2706 /* Output the entire sequence. */
2707 insns = get_insns ();
2708 end_sequence ();
2709 emit_insn (insns);
2710 return target;
2713 /* If we were unable to expand via the builtin, stop the sequence
2714 (without outputting the insns) and call to the library function
2715 with the stabilized argument list. */
2716 end_sequence ();
2718 target = expand_call (exp, target, target == const0_rtx);
2720 return target;
2723 /* To evaluate powi(x,n), the floating point value x raised to the
2724 constant integer exponent n, we use a hybrid algorithm that
2725 combines the "window method" with look-up tables. For an
2726 introduction to exponentiation algorithms and "addition chains",
2727 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2728 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2729 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2730 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2732 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2733 multiplications to inline before calling the system library's pow
2734 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2735 so this default never requires calling pow, powf or powl. */
2737 #ifndef POWI_MAX_MULTS
2738 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2739 #endif
2741 /* The size of the "optimal power tree" lookup table. All
2742 exponents less than this value are simply looked up in the
2743 powi_table below. This threshold is also used to size the
2744 cache of pseudo registers that hold intermediate results. */
2745 #define POWI_TABLE_SIZE 256
2747 /* The size, in bits of the window, used in the "window method"
2748 exponentiation algorithm. This is equivalent to a radix of
2749 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2750 #define POWI_WINDOW_SIZE 3
2752 /* The following table is an efficient representation of an
2753 "optimal power tree". For each value, i, the corresponding
2754 value, j, in the table states that an optimal evaluation
2755 sequence for calculating pow(x,i) can be found by evaluating
2756 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2757 100 integers is given in Knuth's "Seminumerical algorithms". */
/* powi_table[i] gives an exponent j such that pow(x,i) is optimally
   computed as pow(x,j) * pow(x,i-j); see the comment above.  */
2759 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2761 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2762 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2763 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2764 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2765 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2766 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2767 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2768 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2769 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2770 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2771 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2772 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2773 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2774 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2775 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2776 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2777 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2778 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2779 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2780 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2781 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2782 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2783 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2784 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2785 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2786 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2787 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2788 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2789 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2790 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2791 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2792 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2796 /* Return the number of multiplications required to calculate
2797 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2798 subroutine of powi_cost. CACHE is an array indicating
2799 which exponents have already been calculated. */
2801 static int
2802 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2804 /* If we've already calculated this exponent, then this evaluation
2805 doesn't require any additional multiplications. */
2806 if (cache[n])
2807 return 0;
2809 cache[n] = true;
2810 return powi_lookup_cost (n - powi_table[n], cache)
2811 + powi_lookup_cost (powi_table[n], cache) + 1;
2814 /* Return the number of multiplications required to calculate
2815 powi(x,n) for an arbitrary x, given the exponent N. This
2816 function needs to be kept in sync with expand_powi below. */
2818 static int
2819 powi_cost (HOST_WIDE_INT n)
2821 bool cache[POWI_TABLE_SIZE];
2822 unsigned HOST_WIDE_INT digit;
2823 unsigned HOST_WIDE_INT val;
2824 int result;
2826 if (n == 0)
2827 return 0;
2829 /* Ignore the reciprocal when calculating the cost. */
2830 val = (n < 0) ? -n : n;
2832 /* Initialize the exponent cache. */
2833 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2834 cache[1] = true;
2836 result = 0;
2838 while (val >= POWI_TABLE_SIZE)
2840 if (val & 1)
2842 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2843 result += powi_lookup_cost (digit, cache)
2844 + POWI_WINDOW_SIZE + 1;
2845 val >>= POWI_WINDOW_SIZE;
2847 else
2849 val >>= 1;
2850 result++;
2854 return result + powi_lookup_cost (val, cache);
2857 /* Recursive subroutine of expand_powi. This function takes the array,
2858 CACHE, of already calculated exponents and an exponent N and returns
2859 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2861 static rtx
2862 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2864 unsigned HOST_WIDE_INT digit;
2865 rtx target, result;
2866 rtx op0, op1;
/* Small exponent: memoize the register and split via the optimal
   power table, pow(x,n) = pow(x,n-j) * pow(x,j).  */
2868 if (n < POWI_TABLE_SIZE)
2870 if (cache[n])
2871 return cache[n];
2873 target = gen_reg_rtx (mode);
2874 cache[n] = target;
2876 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2877 op1 = expand_powi_1 (mode, powi_table[n], cache);
/* Odd large exponent: peel off the low POWI_WINDOW_SIZE bits
   (window method) and recurse on both pieces.  */
2879 else if (n & 1)
2881 target = gen_reg_rtx (mode);
2882 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2883 op0 = expand_powi_1 (mode, n - digit, cache);
2884 op1 = expand_powi_1 (mode, digit, cache);
/* Even large exponent: pow(x,n) = pow(x,n/2) squared.  */
2886 else
2888 target = gen_reg_rtx (mode);
2889 op0 = expand_powi_1 (mode, n >> 1, cache);
2890 op1 = op0;
2893 result = expand_mult (mode, op0, op1, target, 0);
2894 if (result != target)
2895 emit_move_insn (target, result);
2896 return target;
2899 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2900 floating point operand in mode MODE, and N is the exponent. This
2901 function needs to be kept in sync with powi_cost above. */
2903 static rtx
2904 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2906 rtx cache[POWI_TABLE_SIZE];
2907 rtx result;
2909 if (n == 0)
2910 return CONST1_RTX (mode);
2912 memset (cache, 0, sizeof (cache));
2913 cache[1] = x;
2915 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2917 /* If the original exponent was negative, reciprocate the result. */
2918 if (n < 0)
2919 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2920 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2922 return result;
2925 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2926 a normal call should be emitted rather than expanding the function
2927 in-line. EXP is the expression that is a call to the builtin
2928 function; if convenient, the result should be placed in TARGET. */
2930 static rtx
2931 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2933 tree arg0, arg1;
2934 tree fn, narg0;
2935 tree type = TREE_TYPE (exp);
2936 REAL_VALUE_TYPE cint, c, c2;
2937 HOST_WIDE_INT n;
2938 rtx op, op2;
2939 enum machine_mode mode = TYPE_MODE (type);
2941 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2942 return NULL_RTX;
2944 arg0 = CALL_EXPR_ARG (exp, 0);
2945 arg1 = CALL_EXPR_ARG (exp, 1);
2947 if (TREE_CODE (arg1) != REAL_CST
2948 || TREE_OVERFLOW (arg1))
2949 return expand_builtin_mathfn_2 (exp, target, subtarget);
2951 /* Handle constant exponents. */
2953 /* For integer valued exponents we can expand to an optimal multiplication
2954 sequence using expand_powi. */
2955 c = TREE_REAL_CST (arg1);
2956 n = real_to_integer (&c);
2957 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2958 if (real_identical (&c, &cint)
2959 && ((n >= -1 && n <= 2)
2960 || (flag_unsafe_math_optimizations
2961 && optimize_insn_for_speed_p ()
2962 && powi_cost (n) <= POWI_MAX_MULTS)))
2964 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2965 if (n != 1)
2967 op = force_reg (mode, op);
2968 op = expand_powi (op, mode, n);
2970 return op;
2973 narg0 = builtin_save_expr (arg0);
2975 /* If the exponent is not integer valued, check if it is half of an integer.
2976 In this case we can expand to sqrt (x) * x**(n/2). */
2977 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2978 if (fn != NULL_TREE)
2980 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2981 n = real_to_integer (&c2);
2982 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2983 if (real_identical (&c2, &cint)
2984 && ((flag_unsafe_math_optimizations
2985 && optimize_insn_for_speed_p ()
2986 && powi_cost (n/2) <= POWI_MAX_MULTS)
2987 /* Even the c == 0.5 case cannot be done unconditionally
2988 when we need to preserve signed zeros, as
2989 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
2990 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
2991 /* For c == 1.5 we can assume that x * sqrt (x) is always
2992 smaller than pow (x, 1.5) if sqrt will not be expanded
2993 as a call. */
2994 || (n == 3
2995 && (optab_handler (sqrt_optab, mode)->insn_code
2996 != CODE_FOR_nothing))))
2998 tree call_expr = build_call_nofold (fn, 1, narg0);
2999 /* Use expand_expr in case the newly built call expression
3000 was folded to a non-call. */
3001 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3002 if (n != 1)
3004 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3005 op2 = force_reg (mode, op2);
3006 op2 = expand_powi (op2, mode, abs (n / 2));
3007 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3008 0, OPTAB_LIB_WIDEN);
3009 /* If the original exponent was negative, reciprocate the
3010 result. */
3011 if (n < 0)
3012 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3013 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3015 return op;
3019 /* Try if the exponent is a third of an integer. In this case
3020 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3021 different from pow (x, 1./3.) due to rounding and behavior
3022 with negative x we need to constrain this transformation to
3023 unsafe math and positive x or finite math. */
3024 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3025 if (fn != NULL_TREE
3026 && flag_unsafe_math_optimizations
3027 && (tree_expr_nonnegative_p (arg0)
3028 || !HONOR_NANS (mode)))
3030 REAL_VALUE_TYPE dconst3;
3031 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3032 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3033 real_round (&c2, mode, &c2);
3034 n = real_to_integer (&c2);
3035 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3036 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3037 real_convert (&c2, mode, &c2);
3038 if (real_identical (&c2, &c)
3039 && ((optimize_insn_for_speed_p ()
3040 && powi_cost (n/3) <= POWI_MAX_MULTS)
3041 || n == 1))
3043 tree call_expr = build_call_nofold (fn, 1,narg0);
3044 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3045 if (abs (n) % 3 == 2)
3046 op = expand_simple_binop (mode, MULT, op, op, op,
3047 0, OPTAB_LIB_WIDEN);
3048 if (n != 1)
3050 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3051 op2 = force_reg (mode, op2);
3052 op2 = expand_powi (op2, mode, abs (n / 3));
3053 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3054 0, OPTAB_LIB_WIDEN);
3055 /* If the original exponent was negative, reciprocate the
3056 result. */
3057 if (n < 0)
3058 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3059 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3061 return op;
3065 /* Fall back to optab expansion. */
3066 return expand_builtin_mathfn_2 (exp, target, subtarget);
3069 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3070 a normal call should be emitted rather than expanding the function
3071 in-line. EXP is the expression that is a call to the builtin
3072 function; if convenient, the result should be placed in TARGET. */
3074 static rtx
3075 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3077 tree arg0, arg1;
3078 rtx op0, op1;
3079 enum machine_mode mode;
3080 enum machine_mode mode2;
3082 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3083 return NULL_RTX;
3085 arg0 = CALL_EXPR_ARG (exp, 0);
3086 arg1 = CALL_EXPR_ARG (exp, 1);
3087 mode = TYPE_MODE (TREE_TYPE (exp));
3089 /* Handle constant power. */
3091 if (TREE_CODE (arg1) == INTEGER_CST
3092 && !TREE_OVERFLOW (arg1))
3094 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3096 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3097 Otherwise, check the number of multiplications required. */
3098 if ((TREE_INT_CST_HIGH (arg1) == 0
3099 || TREE_INT_CST_HIGH (arg1) == -1)
3100 && ((n >= -1 && n <= 2)
3101 || (optimize_insn_for_speed_p ()
3102 && powi_cost (n) <= POWI_MAX_MULTS)))
3104 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3105 op0 = force_reg (mode, op0);
3106 return expand_powi (op0, mode, n);
3110 /* Emit a libcall to libgcc. */
3112 /* Mode of the 2nd argument must match that of an int. */
3113 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3115 if (target == NULL_RTX)
3116 target = gen_reg_rtx (mode);
3118 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3119 if (GET_MODE (op0) != mode)
3120 op0 = convert_to_mode (mode, op0, 0);
3121 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3122 if (GET_MODE (op1) != mode2)
3123 op1 = convert_to_mode (mode2, op1, 0);
3125 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3126 target, LCT_CONST, mode, 2,
3127 op0, mode, op1, mode2);
3129 return target;
3132 /* Expand expression EXP which is a call to the strlen builtin. Return
3133 NULL_RTX if we failed the caller should emit a normal call, otherwise
3134 try to get the result in TARGET, if convenient. */
3136 static rtx
3137 expand_builtin_strlen (tree exp, rtx target,
3138 enum machine_mode target_mode)
3140 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3141 return NULL_RTX;
3142 else
3144 rtx pat;
3145 tree len;
3146 tree src = CALL_EXPR_ARG (exp, 0);
3147 rtx result, src_reg, char_rtx, before_strlen;
3148 enum machine_mode insn_mode = target_mode, char_mode;
3149 enum insn_code icode = CODE_FOR_nothing;
3150 int align;
3152 /* If the length can be computed at compile-time, return it. */
3153 len = c_strlen (src, 0);
3154 if (len)
3155 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3157 /* If the length can be computed at compile-time and is constant
3158 integer, but there are side-effects in src, evaluate
3159 src for side-effects, then return len.
3160 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3161 can be optimized into: i++; x = 3; */
3162 len = c_strlen (src, 1);
3163 if (len && TREE_CODE (len) == INTEGER_CST)
3165 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3166 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3169 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3171 /* If SRC is not a pointer type, don't do this operation inline. */
3172 if (align == 0)
3173 return NULL_RTX;
3175 /* Bail out if we can't compute strlen in the right mode. */
3176 while (insn_mode != VOIDmode)
3178 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3179 if (icode != CODE_FOR_nothing)
3180 break;
3182 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3184 if (insn_mode == VOIDmode)
3185 return NULL_RTX;
3187 /* Make a place to write the result of the instruction. */
3188 result = target;
3189 if (! (result != 0
3190 && REG_P (result)
3191 && GET_MODE (result) == insn_mode
3192 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3193 result = gen_reg_rtx (insn_mode);
3195 /* Make a place to hold the source address. We will not expand
3196 the actual source until we are sure that the expansion will
3197 not fail -- there are trees that cannot be expanded twice. */
3198 src_reg = gen_reg_rtx (Pmode);
3200 /* Mark the beginning of the strlen sequence so we can emit the
3201 source operand later. */
3202 before_strlen = get_last_insn ();
3204 char_rtx = const0_rtx;
3205 char_mode = insn_data[(int) icode].operand[2].mode;
3206 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3207 char_mode))
3208 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3210 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3211 char_rtx, GEN_INT (align));
3212 if (! pat)
3213 return NULL_RTX;
3214 emit_insn (pat);
3216 /* Now that we are assured of success, expand the source. */
3217 start_sequence ();
3218 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3219 if (pat != src_reg)
3220 emit_move_insn (src_reg, pat);
3221 pat = get_insns ();
3222 end_sequence ();
3224 if (before_strlen)
3225 emit_insn_after (pat, before_strlen);
3226 else
3227 emit_insn_before (pat, get_insns ());
3229 /* Return the value in the proper mode for this function. */
3230 if (GET_MODE (result) == target_mode)
3231 target = result;
3232 else if (target != 0)
3233 convert_move (target, result, 0);
3234 else
3235 target = convert_to_mode (target_mode, result, 0);
3237 return target;
3241 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3242 bytes from constant string DATA + OFFSET and return it as target
3243 constant. */
3245 static rtx
3246 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3247 enum machine_mode mode)
3249 const char *str = (const char *) data;
3251 gcc_assert (offset >= 0
3252 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3253 <= strlen (str) + 1));
3255 return c_readstr (str + offset, mode);
3258 /* Expand a call EXP to the memcpy builtin.
3259 Return NULL_RTX if we failed, the caller should emit a normal call,
3260 otherwise try to get the result in TARGET, if convenient (and in
3261 mode MODE if that's convenient). */
3263 static rtx
3264 expand_builtin_memcpy (tree exp, rtx target)
3266 if (!validate_arglist (exp,
3267 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3268 return NULL_RTX;
3269 else
3271 tree dest = CALL_EXPR_ARG (exp, 0);
3272 tree src = CALL_EXPR_ARG (exp, 1);
3273 tree len = CALL_EXPR_ARG (exp, 2);
3274 const char *src_str;
3275 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3276 unsigned int dest_align
3277 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3278 rtx dest_mem, src_mem, dest_addr, len_rtx;
3279 HOST_WIDE_INT expected_size = -1;
3280 unsigned int expected_align = 0;
3282 /* If DEST is not a pointer type, call the normal function. */
3283 if (dest_align == 0)
3284 return NULL_RTX;
3286 /* If either SRC is not a pointer type, don't do this
3287 operation in-line. */
3288 if (src_align == 0)
3289 return NULL_RTX;
3291 if (currently_expanding_gimple_stmt)
3292 stringop_block_profile (currently_expanding_gimple_stmt,
3293 &expected_align, &expected_size);
3295 if (expected_align < dest_align)
3296 expected_align = dest_align;
3297 dest_mem = get_memory_rtx (dest, len);
3298 set_mem_align (dest_mem, dest_align);
3299 len_rtx = expand_normal (len);
3300 src_str = c_getstr (src);
3302 /* If SRC is a string constant and block move would be done
3303 by pieces, we can avoid loading the string from memory
3304 and only stored the computed constants. */
3305 if (src_str
3306 && CONST_INT_P (len_rtx)
3307 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3308 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3309 CONST_CAST (char *, src_str),
3310 dest_align, false))
3312 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3313 builtin_memcpy_read_str,
3314 CONST_CAST (char *, src_str),
3315 dest_align, false, 0);
3316 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3317 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3318 return dest_mem;
3321 src_mem = get_memory_rtx (src, len);
3322 set_mem_align (src_mem, src_align);
3324 /* Copy word part most expediently. */
3325 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3326 CALL_EXPR_TAILCALL (exp)
3327 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3328 expected_align, expected_size);
3330 if (dest_addr == 0)
3332 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3333 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3335 return dest_addr;
3339 /* Expand a call EXP to the mempcpy builtin.
3340 Return NULL_RTX if we failed; the caller should emit a normal call,
3341 otherwise try to get the result in TARGET, if convenient (and in
3342 mode MODE if that's convenient). If ENDP is 0 return the
3343 destination pointer, if ENDP is 1 return the end pointer ala
3344 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3345 stpcpy. */
3347 static rtx
3348 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3350 if (!validate_arglist (exp,
3351 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3352 return NULL_RTX;
3353 else
3355 tree dest = CALL_EXPR_ARG (exp, 0);
3356 tree src = CALL_EXPR_ARG (exp, 1);
3357 tree len = CALL_EXPR_ARG (exp, 2);
3358 return expand_builtin_mempcpy_args (dest, src, len,
3359 target, mode, /*endp=*/ 1);
3363 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3364 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3365 so that this can also be called without constructing an actual CALL_EXPR.
3366 The other arguments and return value are the same as for
3367 expand_builtin_mempcpy. */
3369 static rtx
3370 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3371 rtx target, enum machine_mode mode, int endp)
3373 /* If return value is ignored, transform mempcpy into memcpy. */
3374 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3376 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3377 tree result = build_call_nofold (fn, 3, dest, src, len);
3378 return expand_expr (result, target, mode, EXPAND_NORMAL);
3380 else
3382 const char *src_str;
3383 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3384 unsigned int dest_align
3385 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3386 rtx dest_mem, src_mem, len_rtx;
3388 /* If either SRC or DEST is not a pointer type, don't do this
3389 operation in-line. */
3390 if (dest_align == 0 || src_align == 0)
3391 return NULL_RTX;
3393 /* If LEN is not constant, call the normal function. */
3394 if (! host_integerp (len, 1))
3395 return NULL_RTX;
3397 len_rtx = expand_normal (len);
3398 src_str = c_getstr (src);
3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
3402 and only stored the computed constants. */
3403 if (src_str
3404 && CONST_INT_P (len_rtx)
3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 CONST_CAST (char *, src_str),
3415 dest_align, false, endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3421 if (CONST_INT_P (len_rtx)
3422 && can_move_by_pieces (INTVAL (len_rtx),
3423 MIN (dest_align, src_align)))
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 src_mem = get_memory_rtx (src, len);
3428 set_mem_align (src_mem, src_align);
3429 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3430 MIN (dest_align, src_align), endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3436 return NULL_RTX;
3440 #ifndef HAVE_movstr
3441 # define HAVE_movstr 0
3442 # define CODE_FOR_movstr CODE_FOR_nothing
3443 #endif
3445 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3446 we failed, the caller should emit a normal call, otherwise try to
3447 get the result in TARGET, if convenient. If ENDP is 0 return the
3448 destination pointer, if ENDP is 1 return the end pointer ala
3449 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3450 stpcpy. */
3452 static rtx
3453 expand_movstr (tree dest, tree src, rtx target, int endp)
3455 rtx end;
3456 rtx dest_mem;
3457 rtx src_mem;
3458 rtx insn;
3459 const struct insn_data * data;
3461 if (!HAVE_movstr)
3462 return NULL_RTX;
3464 dest_mem = get_memory_rtx (dest, NULL);
3465 src_mem = get_memory_rtx (src, NULL);
3466 if (!endp)
3468 target = force_reg (Pmode, XEXP (dest_mem, 0));
3469 dest_mem = replace_equiv_address (dest_mem, target);
3470 end = gen_reg_rtx (Pmode);
3472 else
3474 if (target == 0 || target == const0_rtx)
3476 end = gen_reg_rtx (Pmode);
3477 if (target == 0)
3478 target = end;
3480 else
3481 end = target;
3484 data = insn_data + CODE_FOR_movstr;
3486 if (data->operand[0].mode != VOIDmode)
3487 end = gen_lowpart (data->operand[0].mode, end);
3489 insn = data->genfun (end, dest_mem, src_mem);
3491 gcc_assert (insn);
3493 emit_insn (insn);
3495 /* movstr is supposed to set end to the address of the NUL
3496 terminator. If the caller requested a mempcpy-like return value,
3497 adjust it. */
3498 if (endp == 1 && target != const0_rtx)
3500 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3501 emit_move_insn (target, force_operand (tem, NULL_RTX));
3504 return target;
3507 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3508 NULL_RTX if we failed the caller should emit a normal call, otherwise
3509 try to get the result in TARGET, if convenient (and in mode MODE if that's
3510 convenient). */
3512 static rtx
3513 expand_builtin_strcpy (tree exp, rtx target)
3515 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3517 tree dest = CALL_EXPR_ARG (exp, 0);
3518 tree src = CALL_EXPR_ARG (exp, 1);
3519 return expand_builtin_strcpy_args (dest, src, target);
3521 return NULL_RTX;
3524 /* Helper function to do the actual work for expand_builtin_strcpy. The
3525 arguments to the builtin_strcpy call DEST and SRC are broken out
3526 so that this can also be called without constructing an actual CALL_EXPR.
3527 The other arguments and return value are the same as for
3528 expand_builtin_strcpy. */
3530 static rtx
3531 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3533 return expand_movstr (dest, src, target, /*endp=*/0);
3536 /* Expand a call EXP to the stpcpy builtin.
3537 Return NULL_RTX if we failed the caller should emit a normal call,
3538 otherwise try to get the result in TARGET, if convenient (and in
3539 mode MODE if that's convenient). */
3541 static rtx
3542 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3544 tree dst, src;
3545 location_t loc = EXPR_LOCATION (exp);
3547 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3548 return NULL_RTX;
3550 dst = CALL_EXPR_ARG (exp, 0);
3551 src = CALL_EXPR_ARG (exp, 1);
3553 /* If return value is ignored, transform stpcpy into strcpy. */
3554 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3556 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3557 tree result = build_call_nofold (fn, 2, dst, src);
3558 return expand_expr (result, target, mode, EXPAND_NORMAL);
3560 else
3562 tree len, lenp1;
3563 rtx ret;
3565 /* Ensure we get an actual string whose length can be evaluated at
3566 compile-time, not an expression containing a string. This is
3567 because the latter will potentially produce pessimized code
3568 when used to produce the return value. */
3569 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3570 return expand_movstr (dst, src, target, /*endp=*/2);
3572 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3573 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3574 target, mode, /*endp=*/2);
3576 if (ret)
3577 return ret;
3579 if (TREE_CODE (len) == INTEGER_CST)
3581 rtx len_rtx = expand_normal (len);
3583 if (CONST_INT_P (len_rtx))
3585 ret = expand_builtin_strcpy_args (dst, src, target);
3587 if (ret)
3589 if (! target)
3591 if (mode != VOIDmode)
3592 target = gen_reg_rtx (mode);
3593 else
3594 target = gen_reg_rtx (GET_MODE (ret));
3596 if (GET_MODE (target) != GET_MODE (ret))
3597 ret = gen_lowpart (GET_MODE (target), ret);
3599 ret = plus_constant (ret, INTVAL (len_rtx));
3600 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3601 gcc_assert (ret);
3603 return target;
3608 return expand_movstr (dst, src, target, /*endp=*/2);
3612 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3613 bytes from constant string DATA + OFFSET and return it as target
3614 constant. */
3617 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3618 enum machine_mode mode)
3620 const char *str = (const char *) data;
3622 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3623 return const0_rtx;
3625 return c_readstr (str + offset, mode);
3628 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3629 NULL_RTX if we failed the caller should emit a normal call. */
3631 static rtx
3632 expand_builtin_strncpy (tree exp, rtx target)
3634 location_t loc = EXPR_LOCATION (exp);
3636 if (validate_arglist (exp,
3637 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 tree dest = CALL_EXPR_ARG (exp, 0);
3640 tree src = CALL_EXPR_ARG (exp, 1);
3641 tree len = CALL_EXPR_ARG (exp, 2);
3642 tree slen = c_strlen (src, 1);
3644 /* We must be passed a constant len and src parameter. */
3645 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3646 return NULL_RTX;
3648 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3650 /* We're required to pad with trailing zeros if the requested
3651 len is greater than strlen(s2)+1. In that case try to
3652 use store_by_pieces, if it fails, punt. */
3653 if (tree_int_cst_lt (slen, len))
3655 unsigned int dest_align
3656 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3657 const char *p = c_getstr (src);
3658 rtx dest_mem;
3660 if (!p || dest_align == 0 || !host_integerp (len, 1)
3661 || !can_store_by_pieces (tree_low_cst (len, 1),
3662 builtin_strncpy_read_str,
3663 CONST_CAST (char *, p),
3664 dest_align, false))
3665 return NULL_RTX;
3667 dest_mem = get_memory_rtx (dest, len);
3668 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3669 builtin_strncpy_read_str,
3670 CONST_CAST (char *, p), dest_align, false, 0);
3671 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3672 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3673 return dest_mem;
3676 return NULL_RTX;
3679 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3680 bytes from constant string DATA + OFFSET and return it as target
3681 constant. */
3684 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3685 enum machine_mode mode)
3687 const char *c = (const char *) data;
3688 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3690 memset (p, *c, GET_MODE_SIZE (mode));
3692 return c_readstr (p, mode);
3695 /* Callback routine for store_by_pieces. Return the RTL of a register
3696 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3697 char value given in the RTL register data. For example, if mode is
3698 4 bytes wide, return the RTL for 0x01010101*data. */
3700 static rtx
3701 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3702 enum machine_mode mode)
3704 rtx target, coeff;
3705 size_t size;
3706 char *p;
3708 size = GET_MODE_SIZE (mode);
3709 if (size == 1)
3710 return (rtx) data;
3712 p = XALLOCAVEC (char, size);
3713 memset (p, 1, size);
3714 coeff = c_readstr (p, mode);
3716 target = convert_to_mode (mode, (rtx) data, 1);
3717 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3718 return force_reg (mode, target);
3721 /* Expand expression EXP, which is a call to the memset builtin. Return
3722 NULL_RTX if we failed the caller should emit a normal call, otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3726 static rtx
3727 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3731 return NULL_RTX;
3732 else
3734 tree dest = CALL_EXPR_ARG (exp, 0);
3735 tree val = CALL_EXPR_ARG (exp, 1);
3736 tree len = CALL_EXPR_ARG (exp, 2);
3737 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3741 /* Helper function to do the actual work for expand_builtin_memset. The
3742 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3743 so that this can also be called without constructing an actual CALL_EXPR.
3744 The other arguments and return value are the same as for
3745 expand_builtin_memset. */
3747 static rtx
3748 expand_builtin_memset_args (tree dest, tree val, tree len,
3749 rtx target, enum machine_mode mode, tree orig_exp)
3751 tree fndecl, fn;
3752 enum built_in_function fcode;
3753 char c;
3754 unsigned int dest_align;
3755 rtx dest_mem, dest_addr, len_rtx;
3756 HOST_WIDE_INT expected_size = -1;
3757 unsigned int expected_align = 0;
3759 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3761 /* If DEST is not a pointer type, don't do this operation in-line. */
3762 if (dest_align == 0)
3763 return NULL_RTX;
3765 if (currently_expanding_gimple_stmt)
3766 stringop_block_profile (currently_expanding_gimple_stmt,
3767 &expected_align, &expected_size);
3769 if (expected_align < dest_align)
3770 expected_align = dest_align;
3772 /* If the LEN parameter is zero, return DEST. */
3773 if (integer_zerop (len))
3775 /* Evaluate and ignore VAL in case it has side-effects. */
3776 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3777 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3780 /* Stabilize the arguments in case we fail. */
3781 dest = builtin_save_expr (dest);
3782 val = builtin_save_expr (val);
3783 len = builtin_save_expr (len);
3785 len_rtx = expand_normal (len);
3786 dest_mem = get_memory_rtx (dest, len);
3788 if (TREE_CODE (val) != INTEGER_CST)
3790 rtx val_rtx;
3792 val_rtx = expand_normal (val);
3793 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3794 val_rtx, 0);
3796 /* Assume that we can memset by pieces if we can store
3797 * the coefficients by pieces (in the required modes).
3798 * We can't pass builtin_memset_gen_str as that emits RTL. */
3799 c = 1;
3800 if (host_integerp (len, 1)
3801 && can_store_by_pieces (tree_low_cst (len, 1),
3802 builtin_memset_read_str, &c, dest_align,
3803 true))
3805 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3806 val_rtx);
3807 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3808 builtin_memset_gen_str, val_rtx, dest_align,
3809 true, 0);
3811 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3812 dest_align, expected_align,
3813 expected_size))
3814 goto do_libcall;
3816 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3817 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3818 return dest_mem;
3821 if (target_char_cast (val, &c))
3822 goto do_libcall;
3824 if (c)
3826 if (host_integerp (len, 1)
3827 && can_store_by_pieces (tree_low_cst (len, 1),
3828 builtin_memset_read_str, &c, dest_align,
3829 true))
3830 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3831 builtin_memset_read_str, &c, dest_align, true, 0);
3832 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3833 dest_align, expected_align,
3834 expected_size))
3835 goto do_libcall;
3837 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3838 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3839 return dest_mem;
3842 set_mem_align (dest_mem, dest_align);
3843 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3844 CALL_EXPR_TAILCALL (orig_exp)
3845 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3846 expected_align, expected_size);
3848 if (dest_addr == 0)
3850 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3851 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3854 return dest_addr;
3856 do_libcall:
3857 fndecl = get_callee_fndecl (orig_exp);
3858 fcode = DECL_FUNCTION_CODE (fndecl);
3859 if (fcode == BUILT_IN_MEMSET)
3860 fn = build_call_nofold (fndecl, 3, dest, val, len);
3861 else if (fcode == BUILT_IN_BZERO)
3862 fn = build_call_nofold (fndecl, 2, dest, len);
3863 else
3864 gcc_unreachable ();
3865 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3866 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3867 return expand_call (fn, target, target == const0_rtx);
3870 /* Expand expression EXP, which is a call to the bzero builtin. Return
3871 NULL_RTX if we failed the caller should emit a normal call. */
3873 static rtx
3874 expand_builtin_bzero (tree exp)
3876 tree dest, size;
3877 location_t loc = EXPR_LOCATION (exp);
3879 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3880 return NULL_RTX;
3882 dest = CALL_EXPR_ARG (exp, 0);
3883 size = CALL_EXPR_ARG (exp, 1);
3885 /* New argument list transforming bzero(ptr x, int y) to
3886 memset(ptr x, int 0, size_t y). This is done this way
3887 so that if it isn't expanded inline, we fallback to
3888 calling bzero instead of memset. */
3890 return expand_builtin_memset_args (dest, integer_zero_node,
3891 fold_convert_loc (loc, sizetype, size),
3892 const0_rtx, VOIDmode, exp);
3895 /* Expand expression EXP, which is a call to the memcmp built-in function.
3896 Return NULL_RTX if we failed and the
3897 caller should emit a normal call, otherwise try to get the result in
3898 TARGET, if convenient (and in mode MODE, if that's convenient). */
3900 static rtx
3901 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3902 ATTRIBUTE_UNUSED enum machine_mode mode)
3904 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3906 if (!validate_arglist (exp,
3907 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3908 return NULL_RTX;
/* Inline expansion is only possible when the target provides a block- or
   string-compare insn pattern; otherwise fall through to NULL_RTX below. */
3910 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3912 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3913 rtx result;
3914 rtx insn;
3915 tree arg1 = CALL_EXPR_ARG (exp, 0);
3916 tree arg2 = CALL_EXPR_ARG (exp, 1);
3917 tree len = CALL_EXPR_ARG (exp, 2);
3919 int arg1_align
3920 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3921 int arg2_align
3922 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3923 enum machine_mode insn_mode;
/* Prefer cmpmemsi (true memory compare); fall back to cmpstrnsi,
   which is acceptable here because the length is explicit. */
3925 #ifdef HAVE_cmpmemsi
3926 if (HAVE_cmpmemsi)
3927 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3928 else
3929 #endif
3930 #ifdef HAVE_cmpstrnsi
3931 if (HAVE_cmpstrnsi)
3932 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3933 else
3934 #endif
3935 return NULL_RTX;
3937 /* If we don't have POINTER_TYPE, call the function. */
3938 if (arg1_align == 0 || arg2_align == 0)
3939 return NULL_RTX;
3941 /* Make a place to write the result of the instruction. */
/* Reuse TARGET only if it is a pseudo register of the insn's mode;
   hard registers are rejected to keep the expander's life simple. */
3942 result = target;
3943 if (! (result != 0
3944 && REG_P (result) && GET_MODE (result) == insn_mode
3945 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3946 result = gen_reg_rtx (insn_mode);
3948 arg1_rtx = get_memory_rtx (arg1, len);
3949 arg2_rtx = get_memory_rtx (arg2, len);
3950 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3952 /* Set MEM_SIZE as appropriate. */
3953 if (CONST_INT_P (arg3_rtx))
3955 set_mem_size (arg1_rtx, arg3_rtx);
3956 set_mem_size (arg2_rtx, arg3_rtx);
/* The last operand of both patterns is the guaranteed common alignment
   of the two operands, in bytes. */
3959 #ifdef HAVE_cmpmemsi
3960 if (HAVE_cmpmemsi)
3961 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3962 GEN_INT (MIN (arg1_align, arg2_align)));
3963 else
3964 #endif
3965 #ifdef HAVE_cmpstrnsi
3966 if (HAVE_cmpstrnsi)
3967 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3968 GEN_INT (MIN (arg1_align, arg2_align)));
3969 else
3970 #endif
3971 gcc_unreachable ();
/* The gen_* function may return NULL if its predicates rejected the
   operands; in that case emit an out-of-line memcmp libcall instead. */
3973 if (insn)
3974 emit_insn (insn);
3975 else
3976 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3977 TYPE_MODE (integer_type_node), 3,
3978 XEXP (arg1_rtx, 0), Pmode,
3979 XEXP (arg2_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3981 TYPE_UNSIGNED (sizetype)),
3982 TYPE_MODE (sizetype));
3984 /* Return the value in the proper mode for this function. */
3985 mode = TYPE_MODE (TREE_TYPE (exp));
3986 if (GET_MODE (result) == mode)
3987 return result;
3988 else if (target != 0)
3990 convert_move (target, result, 0);
3991 return target;
3993 else
3994 return convert_to_mode (mode, result, 0);
3996 #endif
3998 return NULL_RTX;
4001 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4002 if we failed the caller should emit a normal call, otherwise try to get
4003 the result in TARGET, if convenient. */
4005 static rtx
4006 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4008 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4009 return NULL_RTX;
4011 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
/* Runtime check in addition to the #if: the optab tables reflect what
   this particular target configuration actually enabled. */
4012 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4013 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4015 rtx arg1_rtx, arg2_rtx;
4016 rtx result, insn = NULL_RTX;
4017 tree fndecl, fn;
4018 tree arg1 = CALL_EXPR_ARG (exp, 0);
4019 tree arg2 = CALL_EXPR_ARG (exp, 1);
4021 int arg1_align
4022 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4023 int arg2_align
4024 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4026 /* If we don't have POINTER_TYPE, call the function. */
4027 if (arg1_align == 0 || arg2_align == 0)
4028 return NULL_RTX;
4030 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
/* The saved expressions are reused on the do_libcall path below, so the
   user's argument expressions are never evaluated twice. */
4031 arg1 = builtin_save_expr (arg1);
4032 arg2 = builtin_save_expr (arg2);
4034 arg1_rtx = get_memory_rtx (arg1, NULL);
4035 arg2_rtx = get_memory_rtx (arg2, NULL);
4037 #ifdef HAVE_cmpstrsi
4038 /* Try to call cmpstrsi. */
4039 if (HAVE_cmpstrsi)
4041 enum machine_mode insn_mode
4042 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4044 /* Make a place to write the result of the instruction. */
4045 result = target;
4046 if (! (result != 0
4047 && REG_P (result) && GET_MODE (result) == insn_mode
4048 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4049 result = gen_reg_rtx (insn_mode);
4051 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4052 GEN_INT (MIN (arg1_align, arg2_align)));
4054 #endif
4055 #ifdef HAVE_cmpstrnsi
4056 /* Try to determine at least one length and call cmpstrnsi. */
4057 if (!insn && HAVE_cmpstrnsi)
4059 tree len;
4060 rtx arg3_rtx;
4062 enum machine_mode insn_mode
4063 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* c_strlen gives a compile-time length when the argument is a known
   string constant; NULL otherwise. */
4064 tree len1 = c_strlen (arg1, 1);
4065 tree len2 = c_strlen (arg2, 1);
/* +1 to cover the terminating NUL, so the bounded compare is
   equivalent to the unbounded strcmp. */
4067 if (len1)
4068 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4069 if (len2)
4070 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4072 /* If we don't have a constant length for the first, use the length
4073 of the second, if we know it. We don't require a constant for
4074 this case; some cost analysis could be done if both are available
4075 but neither is constant. For now, assume they're equally cheap,
4076 unless one has side effects. If both strings have constant lengths,
4077 use the smaller. */
4079 if (!len1)
4080 len = len2;
4081 else if (!len2)
4082 len = len1;
4083 else if (TREE_SIDE_EFFECTS (len1))
4084 len = len2;
4085 else if (TREE_SIDE_EFFECTS (len2))
4086 len = len1;
4087 else if (TREE_CODE (len1) != INTEGER_CST)
4088 len = len2;
4089 else if (TREE_CODE (len2) != INTEGER_CST)
4090 len = len1;
4091 else if (tree_int_cst_lt (len1, len2))
4092 len = len1;
4093 else
4094 len = len2;
4096 /* If both arguments have side effects, we cannot optimize. */
4097 if (!len || TREE_SIDE_EFFECTS (len))
4098 goto do_libcall;
4100 arg3_rtx = expand_normal (len);
4102 /* Make a place to write the result of the instruction. */
4103 result = target;
4104 if (! (result != 0
4105 && REG_P (result) && GET_MODE (result) == insn_mode
4106 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4107 result = gen_reg_rtx (insn_mode);
4109 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4110 GEN_INT (MIN (arg1_align, arg2_align)));
4112 #endif
4114 if (insn)
4116 enum machine_mode mode;
4117 emit_insn (insn);
4119 /* Return the value in the proper mode for this function. */
4120 mode = TYPE_MODE (TREE_TYPE (exp));
4121 if (GET_MODE (result) == mode)
4122 return result;
4123 if (target == 0)
4124 return convert_to_mode (mode, result, 0);
4125 convert_move (target, result, 0);
4126 return target;
4129 /* Expand the library call ourselves using a stabilized argument
4130 list to avoid re-evaluating the function's arguments twice. */
/* The label is only referenced from the cmpstrnsi branch, hence the
   matching conditional compilation to avoid an unused-label warning. */
4131 #ifdef HAVE_cmpstrnsi
4132 do_libcall:
4133 #endif
4134 fndecl = get_callee_fndecl (exp);
4135 fn = build_call_nofold (fndecl, 2, arg1, arg2);
4136 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4137 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4138 return expand_call (fn, target, target == const0_rtx);
4140 #endif
4141 return NULL_RTX;
4144 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4145 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4146 the result in TARGET, if convenient. */
4148 static rtx
4149 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4150 ATTRIBUTE_UNUSED enum machine_mode mode)
4152 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4154 if (!validate_arglist (exp,
4155 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4156 return NULL_RTX;
4158 /* If c_strlen can determine an expression for one of the string
4159 lengths, and it doesn't have side effects, then emit cmpstrnsi
4160 using length MIN(strlen(string)+1, arg3). */
4161 #ifdef HAVE_cmpstrnsi
4162 if (HAVE_cmpstrnsi)
4164 tree len, len1, len2;
4165 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4166 rtx result, insn;
4167 tree fndecl, fn;
4168 tree arg1 = CALL_EXPR_ARG (exp, 0);
4169 tree arg2 = CALL_EXPR_ARG (exp, 1);
4170 tree arg3 = CALL_EXPR_ARG (exp, 2);
4172 int arg1_align
4173 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4174 int arg2_align
4175 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4176 enum machine_mode insn_mode
4177 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Compile-time string lengths, if either argument is a constant. */
4179 len1 = c_strlen (arg1, 1);
4180 len2 = c_strlen (arg2, 1);
/* +1 so the bound includes the terminating NUL. */
4182 if (len1)
4183 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4184 if (len2)
4185 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4187 /* If we don't have a constant length for the first, use the length
4188 of the second, if we know it. We don't require a constant for
4189 this case; some cost analysis could be done if both are available
4190 but neither is constant. For now, assume they're equally cheap,
4191 unless one has side effects. If both strings have constant lengths,
4192 use the smaller. */
4194 if (!len1)
4195 len = len2;
4196 else if (!len2)
4197 len = len1;
4198 else if (TREE_SIDE_EFFECTS (len1))
4199 len = len2;
4200 else if (TREE_SIDE_EFFECTS (len2))
4201 len = len1;
4202 else if (TREE_CODE (len1) != INTEGER_CST)
4203 len = len2;
4204 else if (TREE_CODE (len2) != INTEGER_CST)
4205 len = len1;
4206 else if (tree_int_cst_lt (len1, len2))
4207 len = len1;
4208 else
4209 len = len2;
4211 /* If both arguments have side effects, we cannot optimize. */
4212 if (!len || TREE_SIDE_EFFECTS (len))
4213 return NULL_RTX;
4215 /* The actual new length parameter is MIN(len,arg3). */
4216 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4217 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4219 /* If we don't have POINTER_TYPE, call the function. */
4220 if (arg1_align == 0 || arg2_align == 0)
4221 return NULL_RTX;
4223 /* Make a place to write the result of the instruction. */
4224 result = target;
4225 if (! (result != 0
4226 && REG_P (result) && GET_MODE (result) == insn_mode
4227 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4228 result = gen_reg_rtx (insn_mode);
4230 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
/* The saved forms are reused in the fallback libcall below so the
   user's expressions are evaluated exactly once. */
4231 arg1 = builtin_save_expr (arg1);
4232 arg2 = builtin_save_expr (arg2);
4233 len = builtin_save_expr (len);
4235 arg1_rtx = get_memory_rtx (arg1, len);
4236 arg2_rtx = get_memory_rtx (arg2, len);
4237 arg3_rtx = expand_normal (len);
4238 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4239 GEN_INT (MIN (arg1_align, arg2_align)));
4240 if (insn)
4242 emit_insn (insn);
4244 /* Return the value in the proper mode for this function. */
4245 mode = TYPE_MODE (TREE_TYPE (exp));
4246 if (GET_MODE (result) == mode)
4247 return result;
4248 if (target == 0)
4249 return convert_to_mode (mode, result, 0);
4250 convert_move (target, result, 0);
4251 return target;
4254 /* Expand the library call ourselves using a stabilized argument
4255 list to avoid re-evaluating the function's arguments twice. */
/* Note the third argument is the clamped LEN, not the original arg3;
   both compute the same strncmp result. */
4256 fndecl = get_callee_fndecl (exp);
4257 fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
4258 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4259 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4260 return expand_call (fn, target, target == const0_rtx);
4262 #endif
4263 return NULL_RTX;
4266 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4267 if that's convenient. */
4270 expand_builtin_saveregs (void)
4272 rtx val, seq;
4274 /* Don't do __builtin_saveregs more than once in a function.
4275 Save the result of the first call and reuse it. */
4276 if (saveregs_value != 0)
4277 return saveregs_value;
4279 /* When this function is called, it means that registers must be
4280 saved on entry to this function. So we migrate the call to the
4281 first insn of this function. */
/* Collect the target-specific save sequence into a detached insn list
   so it can be spliced in at the function entry below. */
4283 start_sequence ();
4285 /* Do whatever the machine needs done in this case. */
4286 val = targetm.calls.expand_builtin_saveregs ();
4288 seq = get_insns ();
4289 end_sequence ();
4291 saveregs_value = val;
4293 /* Put the insns after the NOTE that starts the function. If this
4294 is inside a start_sequence, make the outer-level insn chain current, so
4295 the code is placed at the start of the function. */
4296 push_topmost_sequence ();
4297 emit_insn_after (seq, entry_of_function ());
4298 pop_topmost_sequence ();
4300 return val;
4303 /* __builtin_args_info (N) returns word N of the arg space info
4304 for the current function. The number and meanings of words
4305 is controlled by the definition of CUMULATIVE_ARGS. */
4307 static rtx
4308 expand_builtin_args_info (tree exp)
/* View the target's CUMULATIVE_ARGS record as an array of ints so word
   N can be fetched without knowing its layout. */
4310 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4311 int *word_ptr = (int *) &crtl->args.info;
4313 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4315 if (call_expr_nargs (exp) != 0)
4317 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4318 error ("argument of %<__builtin_args_info%> must be constant")<
4319 else
4321 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4323 if (wordnum < 0 || wordnum >= nwords)
4324 error ("argument of %<__builtin_args_info%> out of range");
4325 else
4326 return GEN_INT (word_ptr[wordnum]);
4329 else
4330 error ("missing argument in %<__builtin_args_info%>");
/* On any diagnosed misuse, return a harmless constant so expansion
   can continue after the error. */
4332 return const0_rtx;
4335 /* Expand a call to __builtin_next_arg. */
4337 static rtx
4338 expand_builtin_next_arg (void)
4340 /* Checking arguments is already done in fold_builtin_next_arg
4341 that must be called before this function. */
/* Address of the first anonymous argument: incoming arg pointer plus
   the offset past the named parameters. */
4342 return expand_binop (ptr_mode, add_optab,
4343 crtl->args.internal_arg_pointer,
4344 crtl->args.arg_offset_rtx,
4345 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4348 /* Make it easier for the backends by protecting the valist argument
4349 from multiple evaluations. */
4351 static tree
4352 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4354 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4356 gcc_assert (vatype != NULL_TREE);
/* Array-type va_list (e.g. x86-64): hand the backend a pointer. */
4358 if (TREE_CODE (vatype) == ARRAY_TYPE)
4360 if (TREE_SIDE_EFFECTS (valist))
4361 valist = save_expr (valist);
4363 /* For this case, the backends will be expecting a pointer to
4364 vatype, but it's possible we've actually been given an array
4365 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4366 So fix it. */
4367 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4369 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4370 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
/* Scalar/record va_list: take the address, stabilize it, then
   dereference, so the object itself is evaluated only once. */
4373 else
4375 tree pt;
4377 if (! needs_lvalue)
4379 if (! TREE_SIDE_EFFECTS (valist))
4380 return valist;
4382 pt = build_pointer_type (vatype);
4383 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4384 TREE_SIDE_EFFECTS (valist) = 1;
4387 if (TREE_SIDE_EFFECTS (valist))
4388 valist = save_expr (valist);
4389 valist = build_fold_indirect_ref_loc (loc, valist);
4392 return valist;
4395 /* The "standard" definition of va_list is void*. */
4397 tree
4398 std_build_builtin_va_list (void)
4400 return ptr_type_node;
4403 /* The "standard" abi va_list is va_list_type_node. */
4405 tree
4406 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4408 return va_list_type_node;
4411 /* The "standard" type of va_list is va_list_type_node. */
4413 tree
4414 std_canonical_va_list_type (tree type)
4416 tree wtype, htype;
/* Strip one level of indirection added by stabilize_va_list_loc or by
   array-to-pointer decay of the argument. */
4418 if (INDIRECT_REF_P (type))
4419 type = TREE_TYPE (type);
4420 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4421 type = TREE_TYPE (type);
/* WTYPE is what va_list is declared as; HTYPE is what we were handed. */
4422 wtype = va_list_type_node;
4423 htype = type;
4424 /* Treat structure va_list types. */
4425 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4426 htype = TREE_TYPE (htype);
4427 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4429 /* If va_list is an array type, the argument may have decayed
4430 to a pointer type, e.g. by being passed to another function.
4431 In that case, unwrap both types so that we can compare the
4432 underlying records. */
4433 if (TREE_CODE (htype) == ARRAY_TYPE
4434 || POINTER_TYPE_P (htype))
4436 wtype = TREE_TYPE (wtype);
4437 htype = TREE_TYPE (htype);
/* Only report a match for the exact va_list type (modulo qualifiers);
   anything else means "not a va_list" to the caller. */
4440 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4441 return va_list_type_node;
4443 return NULL_TREE;
4446 /* The "standard" implementation of va_start: just assign `nextarg' to
4447 the variable. */
4449 void
4450 std_expand_builtin_va_start (tree valist, rtx nextarg)
4452 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4453 convert_move (va_r, nextarg, 0);
4456 /* Expand EXP, a call to __builtin_va_start. */
4458 static rtx
4459 expand_builtin_va_start (tree exp)
4461 rtx nextarg;
4462 tree valist;
4463 location_t loc = EXPR_LOCATION (exp);
4465 if (call_expr_nargs (exp) < 2)
4467 error_at (loc, "too few arguments to function %<va_start%>");
4468 return const0_rtx;
/* fold_builtin_next_arg diagnoses a bogus second argument; a nonzero
   return means an error was emitted and we should bail out. */
4471 if (fold_builtin_next_arg (exp, true))
4472 return const0_rtx;
4474 nextarg = expand_builtin_next_arg ();
/* Need an lvalue: va_start writes into the va_list object. */
4475 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4477 if (targetm.expand_builtin_va_start)
4478 targetm.expand_builtin_va_start (valist, nextarg);
4479 else
4480 std_expand_builtin_va_start (valist, nextarg);
4482 return const0_rtx;
4485 /* The "standard" implementation of va_arg: read the value from the
4486 current (padded) address and increment by the (padded) size. */
4488 tree
4489 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4490 gimple_seq *post_p)
4492 tree addr, t, type_size, rounded_size, valist_tmp;
4493 unsigned HOST_WIDE_INT align, boundary;
4494 bool indirect;
4496 #ifdef ARGS_GROW_DOWNWARD
4497 /* All of the alignment and movement below is for args-grow-up machines.
4498 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4499 implement their own specialized gimplify_va_arg_expr routines. */
4500 gcc_unreachable ();
4501 #endif
/* Arguments passed by invisible reference: what sits in the arg area
   is a pointer to the value, so fetch a pointer and deref at the end. */
4503 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4504 if (indirect)
4505 type = build_pointer_type (type);
4507 align = PARM_BOUNDARY / BITS_PER_UNIT;
4508 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);
4510 /* When we align parameter on stack for caller, if the parameter
4511 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4512 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4513 here with caller. */
4514 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4515 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4517 boundary /= BITS_PER_UNIT;
4519 /* Hoist the valist value into a temporary for the moment. */
4520 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4522 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4523 requires greater alignment, we must perform dynamic alignment. */
4524 if (boundary > align
4525 && !integer_zerop (TYPE_SIZE (type)))
/* Round valist_tmp up to BOUNDARY: add boundary-1, then mask. */
4527 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4528 fold_build2 (POINTER_PLUS_EXPR,
4529 TREE_TYPE (valist),
4530 valist_tmp, size_int (boundary - 1)));
4531 gimplify_and_add (t, pre_p);
4533 t = fold_convert (sizetype, valist_tmp);
4534 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4535 fold_convert (TREE_TYPE (valist),
4536 fold_build2 (BIT_AND_EXPR, sizetype, t,
4537 size_int (-boundary))));
4538 gimplify_and_add (t, pre_p);
4540 else
4541 boundary = align;
4543 /* If the actual alignment is less than the alignment of the type,
4544 adjust the type accordingly so that we don't assume strict alignment
4545 when dereferencing the pointer. */
4546 boundary *= BITS_PER_UNIT;
4547 if (boundary < TYPE_ALIGN (type))
4549 type = build_variant_type_copy (type);
4550 TYPE_ALIGN (type) = boundary;
4553 /* Compute the rounded size of the type. */
4554 type_size = size_in_bytes (type);
4555 rounded_size = round_up (type_size, align);
4557 /* Reduce rounded_size so it's sharable with the postqueue. */
4558 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4560 /* Get AP. */
4561 addr = valist_tmp;
4562 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4564 /* Small args are padded downward. */
/* addr += (rounded_size > align) ? 0 : rounded_size - type_size,
   i.e. point at the value's low end inside its padded slot. */
4565 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4566 rounded_size, size_int (align));
4567 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4568 size_binop (MINUS_EXPR, rounded_size, type_size));
4569 addr = fold_build2 (POINTER_PLUS_EXPR,
4570 TREE_TYPE (addr), addr, t);
4573 /* Compute new value for AP. */
/* The advance is stored back into the caller's VALIST, not the temp. */
4574 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4575 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4576 gimplify_and_add (t, pre_p);
4578 addr = fold_convert (build_pointer_type (type), addr);
/* For by-reference arguments, one extra dereference fetches the actual
   value through the stored pointer. */
4580 if (indirect)
4581 addr = build_va_arg_indirect_ref (addr);
4583 return build_va_arg_indirect_ref (addr);
4586 /* Build an indirect-ref expression over the given TREE, which represents a
4587 piece of a va_arg() expansion. */
4588 tree
4589 build_va_arg_indirect_ref (tree addr)
4591 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4593 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4594 mf_mark (addr);
4596 return addr;
4599 /* Return a dummy expression of type TYPE in order to keep going after an
4600 error. */
4602 static tree
4603 dummy_object (tree type)
/* Dereference of a null constant pointer: never executed, but carries
   the right type/mode for downstream code. */
4605 tree t = build_int_cst (build_pointer_type (type), 0);
4606 return build1 (INDIRECT_REF, type, t);
4609 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4610 builtin function, but a very special sort of operator. */
4612 enum gimplify_status
4613 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4615 tree promoted_type, have_va_type;
4616 tree valist = TREE_OPERAND (*expr_p, 0);
4617 tree type = TREE_TYPE (*expr_p);
4618 tree t;
4619 location_t loc = EXPR_LOCATION (*expr_p);
4621 /* Verify that valist is of the proper type. */
4622 have_va_type = TREE_TYPE (valist);
4623 if (have_va_type == error_mark_node)
4624 return GS_ERROR;
4625 have_va_type = targetm.canonical_va_list_type (have_va_type);
4627 if (have_va_type == NULL_TREE)
4629 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4630 return GS_ERROR;
4633 /* Generate a diagnostic for requesting data of a type that cannot
4634 be passed through `...' due to type promotion at the call site. */
4635 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4636 != type)
/* gave_help is function-static so the explanatory note is printed at
   most once per compilation, not once per occurrence. */
4638 static bool gave_help;
4639 bool warned;
4641 /* Unfortunately, this is merely undefined, rather than a constraint
4642 violation, so we cannot make this an error. If this call is never
4643 executed, the program is still strictly conforming. */
4644 warned = warning_at (loc, 0,
4645 "%qT is promoted to %qT when passed through %<...%>",
4646 type, promoted_type);
4647 if (!gave_help && warned)
4649 gave_help = true;
4650 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4651 promoted_type, type);
4654 /* We can, however, treat "undefined" any way we please.
4655 Call abort to encourage the user to fix the program. */
4656 if (warned)
4657 inform (loc, "if this code is reached, the program will abort");
4658 /* Before the abort, allow the evaluation of the va_list
4659 expression to exit or longjmp. */
4660 gimplify_and_add (valist, pre_p);
4661 t = build_call_expr_loc (loc,
4662 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4663 gimplify_and_add (t, pre_p);
4665 /* This is dead code, but go ahead and finish so that the
4666 mode of the result comes out right. */
4667 *expr_p = dummy_object (type);
4668 return GS_ALL_DONE;
4670 else
4672 /* Make it easier for the backends by protecting the valist argument
4673 from multiple evaluations. */
4674 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4676 /* For this case, the backends will be expecting a pointer to
4677 TREE_TYPE (abi), but it's possible we've
4678 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4679 So fix it. */
4680 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4682 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4683 valist = fold_convert_loc (loc, p1,
4684 build_fold_addr_expr_loc (loc, valist));
/* Array va_list is passed by pointer value (rvalue); other shapes
   must remain addressable lvalues for the target hook to update. */
4687 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4689 else
4690 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4692 if (!targetm.gimplify_va_arg_expr)
4693 /* FIXME: Once most targets are converted we should merely
4694 assert this is non-null. */
4695 return GS_ALL_DONE;
4697 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4698 return GS_OK;
4702 /* Expand EXP, a call to __builtin_va_end. */
4704 static rtx
4705 expand_builtin_va_end (tree exp)
4707 tree valist = CALL_EXPR_ARG (exp, 0);
4709 /* Evaluate for side effects, if needed. I hate macros that don't
4710 do that. */
/* va_end itself generates no code here; only the argument's side
   effects (if any) are preserved. */
4711 if (TREE_SIDE_EFFECTS (valist))
4712 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4714 return const0_rtx;
4717 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4718 builtin rather than just as an assignment in stdarg.h because of the
4719 nastiness of array-type va_list types. */
4721 static rtx
4722 expand_builtin_va_copy (tree exp)
4724 tree dst, src, t;
4725 location_t loc = EXPR_LOCATION (exp);
4727 dst = CALL_EXPR_ARG (exp, 0);
4728 src = CALL_EXPR_ARG (exp, 1);
/* Destination must be an lvalue (1); source is read-only (0). */
4730 dst = stabilize_va_list_loc (loc, dst, 1);
4731 src = stabilize_va_list_loc (loc, src, 0);
4733 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar/record va_list: a plain assignment suffices. */
4735 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4737 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4738 TREE_SIDE_EFFECTS (t) = 1;
4739 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array va_list: arrays don't assign, so do a block copy of the
   underlying storage through the stabilized pointers. */
4741 else
4743 rtx dstb, srcb, size;
4745 /* Evaluate to pointers. */
4746 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4747 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4748 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4749 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4751 dstb = convert_memory_address (Pmode, dstb);
4752 srcb = convert_memory_address (Pmode, srcb);
4754 /* "Dereference" to BLKmode memories. */
4755 dstb = gen_rtx_MEM (BLKmode, dstb);
4756 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4757 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4758 srcb = gen_rtx_MEM (BLKmode, srcb);
4759 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4760 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4762 /* Copy. */
4763 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4766 return const0_rtx;
4769 /* Expand a call to one of the builtin functions __builtin_frame_address or
4770 __builtin_return_address. */
4772 static rtx
4773 expand_builtin_frame_address (tree fndecl, tree exp)
4775 /* The argument must be a nonnegative integer constant.
4776 It counts the number of frames to scan up the stack.
4777 The value is the return address saved in that frame. */
4778 if (call_expr_nargs (exp) == 0)
4779 /* Warning about missing arg was already issued. */
4780 return const0_rtx;
4781 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
/* FNDECL distinguishes the two builtins sharing this expander, so the
   diagnostic names the one the user actually wrote. */
4783 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4784 error ("invalid argument to %<__builtin_frame_address%>");
4785 else
4786 error ("invalid argument to %<__builtin_return_address%>");
4787 return const0_rtx;
4789 else
4791 rtx tem
4792 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4793 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4795 /* Some ports cannot access arbitrary stack frames. */
4796 if (tem == NULL)
4798 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4799 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4800 else
4801 warning (0, "unsupported argument to %<__builtin_return_address%>");
4802 return const0_rtx;
4805 /* For __builtin_frame_address, return what we've got. */
4806 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4807 return tem;
/* Return-address case: copy volatile locations into a register so the
   value survives subsequent stack manipulation. */
4809 if (!REG_P (tem)
4810 && ! CONSTANT_P (tem))
4811 tem = copy_to_mode_reg (Pmode, tem);
4812 return tem;
4816 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4817 we failed and the caller should emit a normal call, otherwise try to get
4818 the result in TARGET, if convenient. */
4820 static rtx
4821 expand_builtin_alloca (tree exp, rtx target)
4823 rtx op0;
4824 rtx result;
4826 /* Emit normal call if marked not-inlineable. */
4827 if (CALL_CANNOT_INLINE_P (exp))
4828 return NULL_RTX;
4830 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4831 return NULL_RTX;
4833 /* Compute the argument. */
4834 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4836 /* Allocate the desired space. */
4837 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
/* allocate_dynamic_stack_space yields a Pmode address; callers expect
   the builtin's pointer result in ptr_mode. */
4838 result = convert_memory_address (ptr_mode, result);
4840 return result;
4843 /* Expand a call to a bswap builtin with argument ARG0. MODE
4844 is the mode to expand with. */
4846 static rtx
4847 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4849 enum machine_mode mode;
4850 tree arg;
4851 rtx op0;
4853 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4854 return NULL_RTX;
4856 arg = CALL_EXPR_ARG (exp, 0);
4857 mode = TYPE_MODE (TREE_TYPE (arg));
4858 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
/* bswap_optab expands via libcall if no insn pattern exists, so this
   cannot fail -- hence the assert rather than a NULL_RTX fallback. */
4860 target = expand_unop (mode, bswap_optab, op0, target, 1);
4862 gcc_assert (target);
4864 return convert_to_mode (mode, target, 0);
4867 /* Expand a call to a unary builtin in EXP.
4868 Return NULL_RTX if a normal call should be emitted rather than expanding the
4869 function in-line. If convenient, the result should be placed in TARGET.
4870 SUBTARGET may be used as the target for computing one of EXP's operands. */
4872 static rtx
4873 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4874 rtx subtarget, optab op_optab)
4876 rtx op0;
4878 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4879 return NULL_RTX;
4881 /* Compute the argument. */
4882 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4883 VOIDmode, EXPAND_NORMAL);
4884 /* Compute op, into TARGET if possible.
4885 Set TARGET to wherever the result comes back. */
/* The operation is done in the argument's mode; the final conversion
   widens/narrows to the builtin's declared result mode. */
4886 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4887 op_optab, op0, target, 1);
4888 gcc_assert (target);
4890 return convert_to_mode (target_mode, target, 0);
4893 /* Expand a call to __builtin_expect. We just return our argument
4894 as the builtin_expect semantic should've been already executed by
4895 tree branch prediction pass. */
4897 static rtx
4898 expand_builtin_expect (tree exp, rtx target)
4900 tree arg;
4902 if (call_expr_nargs (exp) < 2)
4903 return const0_rtx;
4904 arg = CALL_EXPR_ARG (exp, 0);
/* The second argument (the expected value) is deliberately ignored:
   it only influenced earlier branch-prediction passes. */
4906 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4907 /* When guessing was done, the hints should be already stripped away. */
4908 gcc_assert (!flag_guess_branch_prob
4909 || optimize == 0 || errorcount || sorrycount);
4910 return target;
/* Emit code that stops execution: the target's trap insn if it has
   one, otherwise a call to abort.  The trailing barrier tells the
   RTL passes control does not continue past this point. */
4913 void
4914 expand_builtin_trap (void)
4916 #ifdef HAVE_trap
4917 if (HAVE_trap)
4918 emit_insn (gen_trap ())<
4919 else
4920 #endif
4921 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4922 emit_barrier ();
4925 /* Expand a call to __builtin_unreachable. We do nothing except emit
4926 a barrier saying that control flow will not pass here.
4928 It is the responsibility of the program being compiled to ensure
4929 that control flow does never reach __builtin_unreachable. */
4930 static void
4931 expand_builtin_unreachable (void)
4933 emit_barrier ();
4936 /* Expand EXP, a call to fabs, fabsf or fabsl.
4937 Return NULL_RTX if a normal call should be emitted rather than expanding
4938 the function inline. If convenient, the result should be placed
4939 in TARGET. SUBTARGET may be used as the target for computing
4940 the operand. */
4942 static rtx
4943 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4945 enum machine_mode mode;
4946 tree arg;
4947 rtx op0;
4949 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4950 return NULL_RTX;
/* Save the argument back into the call so safe_from_p below (and any
   later re-expansion) sees the stabilized form. */
4952 arg = CALL_EXPR_ARG (exp, 0);
4953 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4954 mode = TYPE_MODE (TREE_TYPE (arg));
4955 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4956 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4959 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4960 Return NULL is a normal call should be emitted rather than expanding the
4961 function inline. If convenient, the result should be placed in TARGET.
4962 SUBTARGET may be used as the target for computing the operand. */
4964 static rtx
4965 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4967 rtx op0, op1;
4968 tree arg;
4970 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4971 return NULL_RTX;
4973 arg = CALL_EXPR_ARG (exp, 0);
4974 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4976 arg = CALL_EXPR_ARG (exp, 1);
4977 op1 = expand_normal (arg);
4979 return expand_copysign (op0, op1, target);
4982 /* Create a new constant string literal and return a char* pointer to it.
4983 The STRING_CST value is the LEN characters at STR. */
4984 tree
4985 build_string_literal (int len, const char *str)
4987 tree t, elem, index, type;
4989 t = build_string (len, str);
4990 elem = build_type_variant (char_type_node, 1, 0);
4991 index = build_index_type (size_int (len - 1));
4992 type = build_array_type (elem, index);
4993 TREE_TYPE (t) = type;
4994 TREE_CONSTANT (t) = 1;
4995 TREE_READONLY (t) = 1;
4996 TREE_STATIC (t) = 1;
4998 type = build_pointer_type (elem);
4999 t = build1 (ADDR_EXPR, type,
5000 build4 (ARRAY_REF, elem,
5001 t, integer_zero_node, NULL_TREE, NULL_TREE));
5002 return t;
5005 /* Expand a call to either the entry or exit function profiler. */
5007 static rtx
5008 expand_builtin_profile_func (bool exitp)
5010 rtx this_rtx, which;
5012 this_rtx = DECL_RTL (current_function_decl);
5013 gcc_assert (MEM_P (this_rtx));
5014 this_rtx = XEXP (this_rtx, 0);
5016 if (exitp)
5017 which = profile_function_exit_libfunc;
5018 else
5019 which = profile_function_entry_libfunc;
5021 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5022 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5024 Pmode);
5026 return const0_rtx;
5029 /* Expand a call to __builtin___clear_cache. */
5031 static rtx
5032 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5034 #ifndef HAVE_clear_cache
5035 #ifdef CLEAR_INSN_CACHE
5036 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5037 does something. Just do the default expansion to a call to
5038 __clear_cache(). */
5039 return NULL_RTX;
5040 #else
5041 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5042 does nothing. There is no need to call it. Do nothing. */
5043 return const0_rtx;
5044 #endif /* CLEAR_INSN_CACHE */
5045 #else
5046 /* We have a "clear_cache" insn, and it will handle everything. */
5047 tree begin, end;
5048 rtx begin_rtx, end_rtx;
5049 enum insn_code icode;
5051 /* We must not expand to a library call. If we did, any
5052 fallback library function in libgcc that might contain a call to
5053 __builtin___clear_cache() would recurse infinitely. */
5054 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5056 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5057 return const0_rtx;
5060 if (HAVE_clear_cache)
5062 icode = CODE_FOR_clear_cache;
5064 begin = CALL_EXPR_ARG (exp, 0);
5065 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5066 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5067 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5068 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5070 end = CALL_EXPR_ARG (exp, 1);
5071 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5072 end_rtx = convert_memory_address (Pmode, end_rtx);
5073 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5074 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5076 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5078 return const0_rtx;
5079 #endif /* HAVE_clear_cache */
5082 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5084 static rtx
5085 round_trampoline_addr (rtx tramp)
5087 rtx temp, addend, mask;
5089 /* If we don't need too much alignment, we'll have been guaranteed
5090 proper alignment by get_trampoline_type. */
5091 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5092 return tramp;
5094 /* Round address up to desired boundary. */
5095 temp = gen_reg_rtx (Pmode);
5096 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5097 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5099 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5100 temp, 0, OPTAB_LIB_WIDEN);
5101 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5102 temp, 0, OPTAB_LIB_WIDEN);
5104 return tramp;
5107 static rtx
5108 expand_builtin_init_trampoline (tree exp)
5110 tree t_tramp, t_func, t_chain;
5111 rtx m_tramp, r_tramp, r_chain, tmp;
5113 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5114 POINTER_TYPE, VOID_TYPE))
5115 return NULL_RTX;
5117 t_tramp = CALL_EXPR_ARG (exp, 0);
5118 t_func = CALL_EXPR_ARG (exp, 1);
5119 t_chain = CALL_EXPR_ARG (exp, 2);
5121 r_tramp = expand_normal (t_tramp);
5122 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5123 MEM_NOTRAP_P (m_tramp) = 1;
5125 /* The TRAMP argument should be the address of a field within the
5126 local function's FRAME decl. Let's see if we can fill in the
5127 to fill in the MEM_ATTRs for this memory. */
5128 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5129 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5130 true, 0);
5132 tmp = round_trampoline_addr (r_tramp);
5133 if (tmp != r_tramp)
5135 m_tramp = change_address (m_tramp, BLKmode, tmp);
5136 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5137 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5140 /* The FUNC argument should be the address of the nested function.
5141 Extract the actual function decl to pass to the hook. */
5142 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5143 t_func = TREE_OPERAND (t_func, 0);
5144 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5146 r_chain = expand_normal (t_chain);
5148 /* Generate insns to initialize the trampoline. */
5149 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5151 trampolines_created = 1;
5152 return const0_rtx;
5155 static rtx
5156 expand_builtin_adjust_trampoline (tree exp)
5158 rtx tramp;
5160 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5161 return NULL_RTX;
5163 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5164 tramp = round_trampoline_addr (tramp);
5165 if (targetm.calls.trampoline_adjust_address)
5166 tramp = targetm.calls.trampoline_adjust_address (tramp);
5168 return tramp;
5171 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5172 function. The function first checks whether the back end provides
5173 an insn to implement signbit for the respective mode. If not, it
5174 checks whether the floating point format of the value is such that
5175 the sign bit can be extracted. If that is not the case, the
5176 function returns NULL_RTX to indicate that a normal call should be
5177 emitted rather than expanding the function in-line. EXP is the
5178 expression that is a call to the builtin function; if convenient,
5179 the result should be placed in TARGET. */
5180 static rtx
5181 expand_builtin_signbit (tree exp, rtx target)
5183 const struct real_format *fmt;
5184 enum machine_mode fmode, imode, rmode;
5185 HOST_WIDE_INT hi, lo;
5186 tree arg;
5187 int word, bitpos;
5188 enum insn_code icode;
5189 rtx temp;
5190 location_t loc = EXPR_LOCATION (exp);
5192 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5193 return NULL_RTX;
5195 arg = CALL_EXPR_ARG (exp, 0);
5196 fmode = TYPE_MODE (TREE_TYPE (arg));
5197 rmode = TYPE_MODE (TREE_TYPE (exp));
5198 fmt = REAL_MODE_FORMAT (fmode);
5200 arg = builtin_save_expr (arg);
5202 /* Expand the argument yielding a RTX expression. */
5203 temp = expand_normal (arg);
5205 /* Check if the back end provides an insn that handles signbit for the
5206 argument's mode. */
5207 icode = signbit_optab->handlers [(int) fmode].insn_code;
5208 if (icode != CODE_FOR_nothing)
5210 rtx last = get_last_insn ();
5211 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5212 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5213 return target;
5214 delete_insns_since (last);
5217 /* For floating point formats without a sign bit, implement signbit
5218 as "ARG < 0.0". */
5219 bitpos = fmt->signbit_ro;
5220 if (bitpos < 0)
5222 /* But we can't do this if the format supports signed zero. */
5223 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5224 return NULL_RTX;
5226 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5227 build_real (TREE_TYPE (arg), dconst0));
5228 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5231 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5233 imode = int_mode_for_mode (fmode);
5234 if (imode == BLKmode)
5235 return NULL_RTX;
5236 temp = gen_lowpart (imode, temp);
5238 else
5240 imode = word_mode;
5241 /* Handle targets with different FP word orders. */
5242 if (FLOAT_WORDS_BIG_ENDIAN)
5243 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5244 else
5245 word = bitpos / BITS_PER_WORD;
5246 temp = operand_subword_force (temp, word, fmode);
5247 bitpos = bitpos % BITS_PER_WORD;
5250 /* Force the intermediate word_mode (or narrower) result into a
5251 register. This avoids attempting to create paradoxical SUBREGs
5252 of floating point modes below. */
5253 temp = force_reg (imode, temp);
5255 /* If the bitpos is within the "result mode" lowpart, the operation
5256 can be implement with a single bitwise AND. Otherwise, we need
5257 a right shift and an AND. */
5259 if (bitpos < GET_MODE_BITSIZE (rmode))
5261 if (bitpos < HOST_BITS_PER_WIDE_INT)
5263 hi = 0;
5264 lo = (HOST_WIDE_INT) 1 << bitpos;
5266 else
5268 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5269 lo = 0;
5272 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5273 temp = gen_lowpart (rmode, temp);
5274 temp = expand_binop (rmode, and_optab, temp,
5275 immed_double_const (lo, hi, rmode),
5276 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5278 else
5280 /* Perform a logical right shift to place the signbit in the least
5281 significant bit, then truncate the result to the desired mode
5282 and mask just this bit. */
5283 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5284 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5285 temp = gen_lowpart (rmode, temp);
5286 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5287 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5290 return temp;
5293 /* Expand fork or exec calls. TARGET is the desired target of the
5294 call. EXP is the call. FN is the
5295 identificator of the actual function. IGNORE is nonzero if the
5296 value is to be ignored. */
5298 static rtx
5299 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5301 tree id, decl;
5302 tree call;
5304 /* If we are not profiling, just call the function. */
5305 if (!profile_arc_flag)
5306 return NULL_RTX;
5308 /* Otherwise call the wrapper. This should be equivalent for the rest of
5309 compiler, so the code does not diverge, and the wrapper may run the
5310 code necessary for keeping the profiling sane. */
5312 switch (DECL_FUNCTION_CODE (fn))
5314 case BUILT_IN_FORK:
5315 id = get_identifier ("__gcov_fork");
5316 break;
5318 case BUILT_IN_EXECL:
5319 id = get_identifier ("__gcov_execl");
5320 break;
5322 case BUILT_IN_EXECV:
5323 id = get_identifier ("__gcov_execv");
5324 break;
5326 case BUILT_IN_EXECLP:
5327 id = get_identifier ("__gcov_execlp");
5328 break;
5330 case BUILT_IN_EXECLE:
5331 id = get_identifier ("__gcov_execle");
5332 break;
5334 case BUILT_IN_EXECVP:
5335 id = get_identifier ("__gcov_execvp");
5336 break;
5338 case BUILT_IN_EXECVE:
5339 id = get_identifier ("__gcov_execve");
5340 break;
5342 default:
5343 gcc_unreachable ();
5346 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5347 FUNCTION_DECL, id, TREE_TYPE (fn));
5348 DECL_EXTERNAL (decl) = 1;
5349 TREE_PUBLIC (decl) = 1;
5350 DECL_ARTIFICIAL (decl) = 1;
5351 TREE_NOTHROW (decl) = 1;
5352 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5353 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5354 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5355 return expand_call (call, target, ignore);
5360 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5361 the pointer in these functions is void*, the tree optimizers may remove
5362 casts. The mode computed in expand_builtin isn't reliable either, due
5363 to __sync_bool_compare_and_swap.
5365 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5366 group of builtins. This gives us log2 of the mode size. */
5368 static inline enum machine_mode
5369 get_builtin_sync_mode (int fcode_diff)
5371 /* The size is not negotiable, so ask not to get BLKmode in return
5372 if the target indicates that a smaller size would be better. */
5373 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5376 /* Expand the memory expression LOC and return the appropriate memory operand
5377 for the builtin_sync operations. */
5379 static rtx
5380 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5382 rtx addr, mem;
5384 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5385 addr = convert_memory_address (Pmode, addr);
5387 /* Note that we explicitly do not want any alias information for this
5388 memory, so that we kill all other live memories. Otherwise we don't
5389 satisfy the full barrier semantics of the intrinsic. */
5390 mem = validize_mem (gen_rtx_MEM (mode, addr));
5392 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5393 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5394 MEM_VOLATILE_P (mem) = 1;
5396 return mem;
5399 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5400 EXP is the CALL_EXPR. CODE is the rtx code
5401 that corresponds to the arithmetic or logical operation from the name;
5402 an exception here is that NOT actually means NAND. TARGET is an optional
5403 place for us to store the results; AFTER is true if this is the
5404 fetch_and_xxx form. IGNORE is true if we don't actually care about
5405 the result of the operation at all. */
5407 static rtx
5408 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5409 enum rtx_code code, bool after,
5410 rtx target, bool ignore)
5412 rtx val, mem;
5413 enum machine_mode old_mode;
5414 location_t loc = EXPR_LOCATION (exp);
5416 if (code == NOT && warn_sync_nand)
5418 tree fndecl = get_callee_fndecl (exp);
5419 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5421 static bool warned_f_a_n, warned_n_a_f;
5423 switch (fcode)
5425 case BUILT_IN_FETCH_AND_NAND_1:
5426 case BUILT_IN_FETCH_AND_NAND_2:
5427 case BUILT_IN_FETCH_AND_NAND_4:
5428 case BUILT_IN_FETCH_AND_NAND_8:
5429 case BUILT_IN_FETCH_AND_NAND_16:
5431 if (warned_f_a_n)
5432 break;
5434 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5435 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5436 warned_f_a_n = true;
5437 break;
5439 case BUILT_IN_NAND_AND_FETCH_1:
5440 case BUILT_IN_NAND_AND_FETCH_2:
5441 case BUILT_IN_NAND_AND_FETCH_4:
5442 case BUILT_IN_NAND_AND_FETCH_8:
5443 case BUILT_IN_NAND_AND_FETCH_16:
5445 if (warned_n_a_f)
5446 break;
5448 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5449 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5450 warned_n_a_f = true;
5451 break;
5453 default:
5454 gcc_unreachable ();
5458 /* Expand the operands. */
5459 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5461 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5462 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5463 of CONST_INTs, where we know the old_mode only from the call argument. */
5464 old_mode = GET_MODE (val);
5465 if (old_mode == VOIDmode)
5466 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5467 val = convert_modes (mode, old_mode, val, 1);
5469 if (ignore)
5470 return expand_sync_operation (mem, val, code);
5471 else
5472 return expand_sync_fetch_operation (mem, val, code, after, target);
5475 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5476 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5477 true if this is the boolean form. TARGET is a place for us to store the
5478 results; this is NOT optional if IS_BOOL is true. */
5480 static rtx
5481 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5482 bool is_bool, rtx target)
5484 rtx old_val, new_val, mem;
5485 enum machine_mode old_mode;
5487 /* Expand the operands. */
5488 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5491 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5492 mode, EXPAND_NORMAL);
5493 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5494 of CONST_INTs, where we know the old_mode only from the call argument. */
5495 old_mode = GET_MODE (old_val);
5496 if (old_mode == VOIDmode)
5497 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5498 old_val = convert_modes (mode, old_mode, old_val, 1);
5500 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5501 mode, EXPAND_NORMAL);
5502 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5503 of CONST_INTs, where we know the old_mode only from the call argument. */
5504 old_mode = GET_MODE (new_val);
5505 if (old_mode == VOIDmode)
5506 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5507 new_val = convert_modes (mode, old_mode, new_val, 1);
5509 if (is_bool)
5510 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5511 else
5512 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5515 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5516 general form is actually an atomic exchange, and some targets only
5517 support a reduced form with the second argument being a constant 1.
5518 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5519 the results. */
5521 static rtx
5522 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5523 rtx target)
5525 rtx val, mem;
5526 enum machine_mode old_mode;
5528 /* Expand the operands. */
5529 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5530 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5531 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5532 of CONST_INTs, where we know the old_mode only from the call argument. */
5533 old_mode = GET_MODE (val);
5534 if (old_mode == VOIDmode)
5535 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5536 val = convert_modes (mode, old_mode, val, 1);
5538 return expand_sync_lock_test_and_set (mem, val, target);
5541 /* Expand the __sync_synchronize intrinsic. */
5543 static void
5544 expand_builtin_synchronize (void)
5546 gimple x;
5547 VEC (tree, gc) *v_clobbers;
5549 #ifdef HAVE_memory_barrier
5550 if (HAVE_memory_barrier)
5552 emit_insn (gen_memory_barrier ());
5553 return;
5555 #endif
5557 if (synchronize_libfunc != NULL_RTX)
5559 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5560 return;
5563 /* If no explicit memory barrier instruction is available, create an
5564 empty asm stmt with a memory clobber. */
5565 v_clobbers = VEC_alloc (tree, gc, 1);
5566 VEC_quick_push (tree, v_clobbers,
5567 tree_cons (NULL, build_string (6, "memory"), NULL));
5568 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5569 gimple_asm_set_volatile (x, true);
5570 expand_asm_stmt (x);
5573 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5575 static void
5576 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5578 enum insn_code icode;
5579 rtx mem, insn;
5580 rtx val = const0_rtx;
5582 /* Expand the operands. */
5583 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 /* If there is an explicit operation in the md file, use it. */
5586 icode = sync_lock_release[mode];
5587 if (icode != CODE_FOR_nothing)
5589 if (!insn_data[icode].operand[1].predicate (val, mode))
5590 val = force_reg (mode, val);
5592 insn = GEN_FCN (icode) (mem, val);
5593 if (insn)
5595 emit_insn (insn);
5596 return;
5600 /* Otherwise we can implement this operation by emitting a barrier
5601 followed by a store of zero. */
5602 expand_builtin_synchronize ();
5603 emit_move_insn (mem, val);
5606 /* Expand an expression EXP that calls a built-in function,
5607 with result going to TARGET if that's convenient
5608 (and in mode MODE if that's convenient).
5609 SUBTARGET may be used as the target for computing one of EXP's operands.
5610 IGNORE is nonzero if the value is to be ignored. */
5613 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5614 int ignore)
5616 tree fndecl = get_callee_fndecl (exp);
5617 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5618 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5620 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5621 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5623 /* When not optimizing, generate calls to library functions for a certain
5624 set of builtins. */
5625 if (!optimize
5626 && !called_as_built_in (fndecl)
5627 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5628 && fcode != BUILT_IN_ALLOCA
5629 && fcode != BUILT_IN_FREE)
5630 return expand_call (exp, target, ignore);
5632 /* The built-in function expanders test for target == const0_rtx
5633 to determine whether the function's result will be ignored. */
5634 if (ignore)
5635 target = const0_rtx;
5637 /* If the result of a pure or const built-in function is ignored, and
5638 none of its arguments are volatile, we can avoid expanding the
5639 built-in call and just evaluate the arguments for side-effects. */
5640 if (target == const0_rtx
5641 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5643 bool volatilep = false;
5644 tree arg;
5645 call_expr_arg_iterator iter;
5647 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5648 if (TREE_THIS_VOLATILE (arg))
5650 volatilep = true;
5651 break;
5654 if (! volatilep)
5656 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5657 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5658 return const0_rtx;
5662 switch (fcode)
5664 CASE_FLT_FN (BUILT_IN_FABS):
5665 target = expand_builtin_fabs (exp, target, subtarget);
5666 if (target)
5667 return target;
5668 break;
5670 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5671 target = expand_builtin_copysign (exp, target, subtarget);
5672 if (target)
5673 return target;
5674 break;
5676 /* Just do a normal library call if we were unable to fold
5677 the values. */
5678 CASE_FLT_FN (BUILT_IN_CABS):
5679 break;
5681 CASE_FLT_FN (BUILT_IN_EXP):
5682 CASE_FLT_FN (BUILT_IN_EXP10):
5683 CASE_FLT_FN (BUILT_IN_POW10):
5684 CASE_FLT_FN (BUILT_IN_EXP2):
5685 CASE_FLT_FN (BUILT_IN_EXPM1):
5686 CASE_FLT_FN (BUILT_IN_LOGB):
5687 CASE_FLT_FN (BUILT_IN_LOG):
5688 CASE_FLT_FN (BUILT_IN_LOG10):
5689 CASE_FLT_FN (BUILT_IN_LOG2):
5690 CASE_FLT_FN (BUILT_IN_LOG1P):
5691 CASE_FLT_FN (BUILT_IN_TAN):
5692 CASE_FLT_FN (BUILT_IN_ASIN):
5693 CASE_FLT_FN (BUILT_IN_ACOS):
5694 CASE_FLT_FN (BUILT_IN_ATAN):
5695 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5696 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5697 because of possible accuracy problems. */
5698 if (! flag_unsafe_math_optimizations)
5699 break;
5700 CASE_FLT_FN (BUILT_IN_SQRT):
5701 CASE_FLT_FN (BUILT_IN_FLOOR):
5702 CASE_FLT_FN (BUILT_IN_CEIL):
5703 CASE_FLT_FN (BUILT_IN_TRUNC):
5704 CASE_FLT_FN (BUILT_IN_ROUND):
5705 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5706 CASE_FLT_FN (BUILT_IN_RINT):
5707 target = expand_builtin_mathfn (exp, target, subtarget);
5708 if (target)
5709 return target;
5710 break;
5712 CASE_FLT_FN (BUILT_IN_ILOGB):
5713 if (! flag_unsafe_math_optimizations)
5714 break;
5715 CASE_FLT_FN (BUILT_IN_ISINF):
5716 CASE_FLT_FN (BUILT_IN_FINITE):
5717 case BUILT_IN_ISFINITE:
5718 case BUILT_IN_ISNORMAL:
5719 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5720 if (target)
5721 return target;
5722 break;
5724 CASE_FLT_FN (BUILT_IN_LCEIL):
5725 CASE_FLT_FN (BUILT_IN_LLCEIL):
5726 CASE_FLT_FN (BUILT_IN_LFLOOR):
5727 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5728 target = expand_builtin_int_roundingfn (exp, target);
5729 if (target)
5730 return target;
5731 break;
5733 CASE_FLT_FN (BUILT_IN_LRINT):
5734 CASE_FLT_FN (BUILT_IN_LLRINT):
5735 CASE_FLT_FN (BUILT_IN_LROUND):
5736 CASE_FLT_FN (BUILT_IN_LLROUND):
5737 target = expand_builtin_int_roundingfn_2 (exp, target);
5738 if (target)
5739 return target;
5740 break;
5742 CASE_FLT_FN (BUILT_IN_POW):
5743 target = expand_builtin_pow (exp, target, subtarget);
5744 if (target)
5745 return target;
5746 break;
5748 CASE_FLT_FN (BUILT_IN_POWI):
5749 target = expand_builtin_powi (exp, target, subtarget);
5750 if (target)
5751 return target;
5752 break;
5754 CASE_FLT_FN (BUILT_IN_ATAN2):
5755 CASE_FLT_FN (BUILT_IN_LDEXP):
5756 CASE_FLT_FN (BUILT_IN_SCALB):
5757 CASE_FLT_FN (BUILT_IN_SCALBN):
5758 CASE_FLT_FN (BUILT_IN_SCALBLN):
5759 if (! flag_unsafe_math_optimizations)
5760 break;
5762 CASE_FLT_FN (BUILT_IN_FMOD):
5763 CASE_FLT_FN (BUILT_IN_REMAINDER):
5764 CASE_FLT_FN (BUILT_IN_DREM):
5765 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5766 if (target)
5767 return target;
5768 break;
5770 CASE_FLT_FN (BUILT_IN_CEXPI):
5771 target = expand_builtin_cexpi (exp, target, subtarget);
5772 gcc_assert (target);
5773 return target;
5775 CASE_FLT_FN (BUILT_IN_SIN):
5776 CASE_FLT_FN (BUILT_IN_COS):
5777 if (! flag_unsafe_math_optimizations)
5778 break;
5779 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5780 if (target)
5781 return target;
5782 break;
5784 CASE_FLT_FN (BUILT_IN_SINCOS):
5785 if (! flag_unsafe_math_optimizations)
5786 break;
5787 target = expand_builtin_sincos (exp);
5788 if (target)
5789 return target;
5790 break;
5792 case BUILT_IN_APPLY_ARGS:
5793 return expand_builtin_apply_args ();
5795 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5796 FUNCTION with a copy of the parameters described by
5797 ARGUMENTS, and ARGSIZE. It returns a block of memory
5798 allocated on the stack into which is stored all the registers
5799 that might possibly be used for returning the result of a
5800 function. ARGUMENTS is the value returned by
5801 __builtin_apply_args. ARGSIZE is the number of bytes of
5802 arguments that must be copied. ??? How should this value be
5803 computed? We'll also need a safe worst case value for varargs
5804 functions. */
5805 case BUILT_IN_APPLY:
5806 if (!validate_arglist (exp, POINTER_TYPE,
5807 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5808 && !validate_arglist (exp, REFERENCE_TYPE,
5809 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5810 return const0_rtx;
5811 else
5813 rtx ops[3];
5815 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5816 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5817 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5819 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5822 /* __builtin_return (RESULT) causes the function to return the
5823 value described by RESULT. RESULT is address of the block of
5824 memory returned by __builtin_apply. */
5825 case BUILT_IN_RETURN:
5826 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5827 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5828 return const0_rtx;
5830 case BUILT_IN_SAVEREGS:
5831 return expand_builtin_saveregs ();
5833 case BUILT_IN_ARGS_INFO:
5834 return expand_builtin_args_info (exp);
5836 case BUILT_IN_VA_ARG_PACK:
5837 /* All valid uses of __builtin_va_arg_pack () are removed during
5838 inlining. */
5839 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5840 return const0_rtx;
5842 case BUILT_IN_VA_ARG_PACK_LEN:
5843 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5844 inlining. */
5845 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5846 return const0_rtx;
5848 /* Return the address of the first anonymous stack arg. */
5849 case BUILT_IN_NEXT_ARG:
5850 if (fold_builtin_next_arg (exp, false))
5851 return const0_rtx;
5852 return expand_builtin_next_arg ();
5854 case BUILT_IN_CLEAR_CACHE:
5855 target = expand_builtin___clear_cache (exp);
5856 if (target)
5857 return target;
5858 break;
5860 case BUILT_IN_CLASSIFY_TYPE:
5861 return expand_builtin_classify_type (exp);
5863 case BUILT_IN_CONSTANT_P:
5864 return const0_rtx;
5866 case BUILT_IN_FRAME_ADDRESS:
5867 case BUILT_IN_RETURN_ADDRESS:
5868 return expand_builtin_frame_address (fndecl, exp);
5870 /* Returns the address of the area where the structure is returned.
5871 0 otherwise. */
5872 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5873 if (call_expr_nargs (exp) != 0
5874 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5875 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5876 return const0_rtx;
5877 else
5878 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5880 case BUILT_IN_ALLOCA:
5881 target = expand_builtin_alloca (exp, target);
5882 if (target)
5883 return target;
5884 break;
5886 case BUILT_IN_STACK_SAVE:
5887 return expand_stack_save ();
5889 case BUILT_IN_STACK_RESTORE:
5890 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5891 return const0_rtx;
5893 case BUILT_IN_BSWAP32:
5894 case BUILT_IN_BSWAP64:
5895 target = expand_builtin_bswap (exp, target, subtarget);
5897 if (target)
5898 return target;
5899 break;
5901 CASE_INT_FN (BUILT_IN_FFS):
5902 case BUILT_IN_FFSIMAX:
5903 target = expand_builtin_unop (target_mode, exp, target,
5904 subtarget, ffs_optab);
5905 if (target)
5906 return target;
5907 break;
5909 CASE_INT_FN (BUILT_IN_CLZ):
5910 case BUILT_IN_CLZIMAX:
5911 target = expand_builtin_unop (target_mode, exp, target,
5912 subtarget, clz_optab);
5913 if (target)
5914 return target;
5915 break;
5917 CASE_INT_FN (BUILT_IN_CTZ):
5918 case BUILT_IN_CTZIMAX:
5919 target = expand_builtin_unop (target_mode, exp, target,
5920 subtarget, ctz_optab);
5921 if (target)
5922 return target;
5923 break;
5925 CASE_INT_FN (BUILT_IN_POPCOUNT):
5926 case BUILT_IN_POPCOUNTIMAX:
5927 target = expand_builtin_unop (target_mode, exp, target,
5928 subtarget, popcount_optab);
5929 if (target)
5930 return target;
5931 break;
5933 CASE_INT_FN (BUILT_IN_PARITY):
5934 case BUILT_IN_PARITYIMAX:
5935 target = expand_builtin_unop (target_mode, exp, target,
5936 subtarget, parity_optab);
5937 if (target)
5938 return target;
5939 break;
5941 case BUILT_IN_STRLEN:
5942 target = expand_builtin_strlen (exp, target, target_mode);
5943 if (target)
5944 return target;
5945 break;
5947 case BUILT_IN_STRCPY:
5948 target = expand_builtin_strcpy (exp, target);
5949 if (target)
5950 return target;
5951 break;
5953 case BUILT_IN_STRNCPY:
5954 target = expand_builtin_strncpy (exp, target);
5955 if (target)
5956 return target;
5957 break;
5959 case BUILT_IN_STPCPY:
5960 target = expand_builtin_stpcpy (exp, target, mode);
5961 if (target)
5962 return target;
5963 break;
5965 case BUILT_IN_MEMCPY:
5966 target = expand_builtin_memcpy (exp, target);
5967 if (target)
5968 return target;
5969 break;
5971 case BUILT_IN_MEMPCPY:
5972 target = expand_builtin_mempcpy (exp, target, mode);
5973 if (target)
5974 return target;
5975 break;
5977 case BUILT_IN_MEMSET:
5978 target = expand_builtin_memset (exp, target, mode);
5979 if (target)
5980 return target;
5981 break;
5983 case BUILT_IN_BZERO:
5984 target = expand_builtin_bzero (exp);
5985 if (target)
5986 return target;
5987 break;
5989 case BUILT_IN_STRCMP:
5990 target = expand_builtin_strcmp (exp, target);
5991 if (target)
5992 return target;
5993 break;
5995 case BUILT_IN_STRNCMP:
5996 target = expand_builtin_strncmp (exp, target, mode);
5997 if (target)
5998 return target;
5999 break;
6001 case BUILT_IN_BCMP:
6002 case BUILT_IN_MEMCMP:
6003 target = expand_builtin_memcmp (exp, target, mode);
6004 if (target)
6005 return target;
6006 break;
6008 case BUILT_IN_SETJMP:
6009 /* This should have been lowered to the builtins below. */
6010 gcc_unreachable ();
6012 case BUILT_IN_SETJMP_SETUP:
6013 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6014 and the receiver label. */
6015 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6017 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6018 VOIDmode, EXPAND_NORMAL);
6019 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6020 rtx label_r = label_rtx (label);
6022 /* This is copied from the handling of non-local gotos. */
6023 expand_builtin_setjmp_setup (buf_addr, label_r);
6024 nonlocal_goto_handler_labels
6025 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6026 nonlocal_goto_handler_labels);
6027 /* ??? Do not let expand_label treat us as such since we would
6028 not want to be both on the list of non-local labels and on
6029 the list of forced labels. */
6030 FORCED_LABEL (label) = 0;
6031 return const0_rtx;
6033 break;
6035 case BUILT_IN_SETJMP_DISPATCHER:
6036 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6037 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6039 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6040 rtx label_r = label_rtx (label);
6042 /* Remove the dispatcher label from the list of non-local labels
6043 since the receiver labels have been added to it above. */
6044 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6045 return const0_rtx;
6047 break;
6049 case BUILT_IN_SETJMP_RECEIVER:
6050 /* __builtin_setjmp_receiver is passed the receiver label. */
6051 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6053 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6054 rtx label_r = label_rtx (label);
6056 expand_builtin_setjmp_receiver (label_r);
6057 return const0_rtx;
6059 break;
6061 /* __builtin_longjmp is passed a pointer to an array of five words.
6062 It's similar to the C library longjmp function but works with
6063 __builtin_setjmp above. */
6064 case BUILT_IN_LONGJMP:
6065 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6067 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6068 VOIDmode, EXPAND_NORMAL);
6069 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6071 if (value != const1_rtx)
6073 error ("%<__builtin_longjmp%> second argument must be 1");
6074 return const0_rtx;
6077 expand_builtin_longjmp (buf_addr, value);
6078 return const0_rtx;
6080 break;
6082 case BUILT_IN_NONLOCAL_GOTO:
6083 target = expand_builtin_nonlocal_goto (exp);
6084 if (target)
6085 return target;
6086 break;
6088 /* This updates the setjmp buffer that is its argument with the value
6089 of the current stack pointer. */
6090 case BUILT_IN_UPDATE_SETJMP_BUF:
6091 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6093 rtx buf_addr
6094 = expand_normal (CALL_EXPR_ARG (exp, 0));
6096 expand_builtin_update_setjmp_buf (buf_addr);
6097 return const0_rtx;
6099 break;
6101 case BUILT_IN_TRAP:
6102 expand_builtin_trap ();
6103 return const0_rtx;
6105 case BUILT_IN_UNREACHABLE:
6106 expand_builtin_unreachable ();
6107 return const0_rtx;
6109 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6110 case BUILT_IN_SIGNBITD32:
6111 case BUILT_IN_SIGNBITD64:
6112 case BUILT_IN_SIGNBITD128:
6113 target = expand_builtin_signbit (exp, target);
6114 if (target)
6115 return target;
6116 break;
6118 /* Various hooks for the DWARF 2 __throw routine. */
6119 case BUILT_IN_UNWIND_INIT:
6120 expand_builtin_unwind_init ();
6121 return const0_rtx;
6122 case BUILT_IN_DWARF_CFA:
6123 return virtual_cfa_rtx;
6124 #ifdef DWARF2_UNWIND_INFO
6125 case BUILT_IN_DWARF_SP_COLUMN:
6126 return expand_builtin_dwarf_sp_column ();
6127 case BUILT_IN_INIT_DWARF_REG_SIZES:
6128 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6129 return const0_rtx;
6130 #endif
6131 case BUILT_IN_FROB_RETURN_ADDR:
6132 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6133 case BUILT_IN_EXTRACT_RETURN_ADDR:
6134 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6135 case BUILT_IN_EH_RETURN:
6136 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6137 CALL_EXPR_ARG (exp, 1));
6138 return const0_rtx;
6139 #ifdef EH_RETURN_DATA_REGNO
6140 case BUILT_IN_EH_RETURN_DATA_REGNO:
6141 return expand_builtin_eh_return_data_regno (exp);
6142 #endif
6143 case BUILT_IN_EXTEND_POINTER:
6144 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6145 case BUILT_IN_EH_POINTER:
6146 return expand_builtin_eh_pointer (exp);
6147 case BUILT_IN_EH_FILTER:
6148 return expand_builtin_eh_filter (exp);
6149 case BUILT_IN_EH_COPY_VALUES:
6150 return expand_builtin_eh_copy_values (exp);
6152 case BUILT_IN_VA_START:
6153 return expand_builtin_va_start (exp);
6154 case BUILT_IN_VA_END:
6155 return expand_builtin_va_end (exp);
6156 case BUILT_IN_VA_COPY:
6157 return expand_builtin_va_copy (exp);
6158 case BUILT_IN_EXPECT:
6159 return expand_builtin_expect (exp, target);
6160 case BUILT_IN_PREFETCH:
6161 expand_builtin_prefetch (exp);
6162 return const0_rtx;
6164 case BUILT_IN_PROFILE_FUNC_ENTER:
6165 return expand_builtin_profile_func (false);
6166 case BUILT_IN_PROFILE_FUNC_EXIT:
6167 return expand_builtin_profile_func (true);
6169 case BUILT_IN_INIT_TRAMPOLINE:
6170 return expand_builtin_init_trampoline (exp);
6171 case BUILT_IN_ADJUST_TRAMPOLINE:
6172 return expand_builtin_adjust_trampoline (exp);
6174 case BUILT_IN_FORK:
6175 case BUILT_IN_EXECL:
6176 case BUILT_IN_EXECV:
6177 case BUILT_IN_EXECLP:
6178 case BUILT_IN_EXECLE:
6179 case BUILT_IN_EXECVP:
6180 case BUILT_IN_EXECVE:
6181 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6182 if (target)
6183 return target;
6184 break;
6186 case BUILT_IN_FETCH_AND_ADD_1:
6187 case BUILT_IN_FETCH_AND_ADD_2:
6188 case BUILT_IN_FETCH_AND_ADD_4:
6189 case BUILT_IN_FETCH_AND_ADD_8:
6190 case BUILT_IN_FETCH_AND_ADD_16:
6191 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6192 target = expand_builtin_sync_operation (mode, exp, PLUS,
6193 false, target, ignore);
6194 if (target)
6195 return target;
6196 break;
6198 case BUILT_IN_FETCH_AND_SUB_1:
6199 case BUILT_IN_FETCH_AND_SUB_2:
6200 case BUILT_IN_FETCH_AND_SUB_4:
6201 case BUILT_IN_FETCH_AND_SUB_8:
6202 case BUILT_IN_FETCH_AND_SUB_16:
6203 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6204 target = expand_builtin_sync_operation (mode, exp, MINUS,
6205 false, target, ignore);
6206 if (target)
6207 return target;
6208 break;
6210 case BUILT_IN_FETCH_AND_OR_1:
6211 case BUILT_IN_FETCH_AND_OR_2:
6212 case BUILT_IN_FETCH_AND_OR_4:
6213 case BUILT_IN_FETCH_AND_OR_8:
6214 case BUILT_IN_FETCH_AND_OR_16:
6215 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6216 target = expand_builtin_sync_operation (mode, exp, IOR,
6217 false, target, ignore);
6218 if (target)
6219 return target;
6220 break;
6222 case BUILT_IN_FETCH_AND_AND_1:
6223 case BUILT_IN_FETCH_AND_AND_2:
6224 case BUILT_IN_FETCH_AND_AND_4:
6225 case BUILT_IN_FETCH_AND_AND_8:
6226 case BUILT_IN_FETCH_AND_AND_16:
6227 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6228 target = expand_builtin_sync_operation (mode, exp, AND,
6229 false, target, ignore);
6230 if (target)
6231 return target;
6232 break;
6234 case BUILT_IN_FETCH_AND_XOR_1:
6235 case BUILT_IN_FETCH_AND_XOR_2:
6236 case BUILT_IN_FETCH_AND_XOR_4:
6237 case BUILT_IN_FETCH_AND_XOR_8:
6238 case BUILT_IN_FETCH_AND_XOR_16:
6239 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6240 target = expand_builtin_sync_operation (mode, exp, XOR,
6241 false, target, ignore);
6242 if (target)
6243 return target;
6244 break;
6246 case BUILT_IN_FETCH_AND_NAND_1:
6247 case BUILT_IN_FETCH_AND_NAND_2:
6248 case BUILT_IN_FETCH_AND_NAND_4:
6249 case BUILT_IN_FETCH_AND_NAND_8:
6250 case BUILT_IN_FETCH_AND_NAND_16:
6251 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6252 target = expand_builtin_sync_operation (mode, exp, NOT,
6253 false, target, ignore);
6254 if (target)
6255 return target;
6256 break;
6258 case BUILT_IN_ADD_AND_FETCH_1:
6259 case BUILT_IN_ADD_AND_FETCH_2:
6260 case BUILT_IN_ADD_AND_FETCH_4:
6261 case BUILT_IN_ADD_AND_FETCH_8:
6262 case BUILT_IN_ADD_AND_FETCH_16:
6263 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6264 target = expand_builtin_sync_operation (mode, exp, PLUS,
6265 true, target, ignore);
6266 if (target)
6267 return target;
6268 break;
6270 case BUILT_IN_SUB_AND_FETCH_1:
6271 case BUILT_IN_SUB_AND_FETCH_2:
6272 case BUILT_IN_SUB_AND_FETCH_4:
6273 case BUILT_IN_SUB_AND_FETCH_8:
6274 case BUILT_IN_SUB_AND_FETCH_16:
6275 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6276 target = expand_builtin_sync_operation (mode, exp, MINUS,
6277 true, target, ignore);
6278 if (target)
6279 return target;
6280 break;
6282 case BUILT_IN_OR_AND_FETCH_1:
6283 case BUILT_IN_OR_AND_FETCH_2:
6284 case BUILT_IN_OR_AND_FETCH_4:
6285 case BUILT_IN_OR_AND_FETCH_8:
6286 case BUILT_IN_OR_AND_FETCH_16:
6287 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6288 target = expand_builtin_sync_operation (mode, exp, IOR,
6289 true, target, ignore);
6290 if (target)
6291 return target;
6292 break;
6294 case BUILT_IN_AND_AND_FETCH_1:
6295 case BUILT_IN_AND_AND_FETCH_2:
6296 case BUILT_IN_AND_AND_FETCH_4:
6297 case BUILT_IN_AND_AND_FETCH_8:
6298 case BUILT_IN_AND_AND_FETCH_16:
6299 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6300 target = expand_builtin_sync_operation (mode, exp, AND,
6301 true, target, ignore);
6302 if (target)
6303 return target;
6304 break;
6306 case BUILT_IN_XOR_AND_FETCH_1:
6307 case BUILT_IN_XOR_AND_FETCH_2:
6308 case BUILT_IN_XOR_AND_FETCH_4:
6309 case BUILT_IN_XOR_AND_FETCH_8:
6310 case BUILT_IN_XOR_AND_FETCH_16:
6311 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6312 target = expand_builtin_sync_operation (mode, exp, XOR,
6313 true, target, ignore);
6314 if (target)
6315 return target;
6316 break;
6318 case BUILT_IN_NAND_AND_FETCH_1:
6319 case BUILT_IN_NAND_AND_FETCH_2:
6320 case BUILT_IN_NAND_AND_FETCH_4:
6321 case BUILT_IN_NAND_AND_FETCH_8:
6322 case BUILT_IN_NAND_AND_FETCH_16:
6323 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6324 target = expand_builtin_sync_operation (mode, exp, NOT,
6325 true, target, ignore);
6326 if (target)
6327 return target;
6328 break;
6330 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6331 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6332 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6333 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6334 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6335 if (mode == VOIDmode)
6336 mode = TYPE_MODE (boolean_type_node);
6337 if (!target || !register_operand (target, mode))
6338 target = gen_reg_rtx (mode);
6340 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6341 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6342 if (target)
6343 return target;
6344 break;
6346 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6347 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6348 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6349 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6350 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6351 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6352 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6353 if (target)
6354 return target;
6355 break;
6357 case BUILT_IN_LOCK_TEST_AND_SET_1:
6358 case BUILT_IN_LOCK_TEST_AND_SET_2:
6359 case BUILT_IN_LOCK_TEST_AND_SET_4:
6360 case BUILT_IN_LOCK_TEST_AND_SET_8:
6361 case BUILT_IN_LOCK_TEST_AND_SET_16:
6362 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6363 target = expand_builtin_lock_test_and_set (mode, exp, target);
6364 if (target)
6365 return target;
6366 break;
6368 case BUILT_IN_LOCK_RELEASE_1:
6369 case BUILT_IN_LOCK_RELEASE_2:
6370 case BUILT_IN_LOCK_RELEASE_4:
6371 case BUILT_IN_LOCK_RELEASE_8:
6372 case BUILT_IN_LOCK_RELEASE_16:
6373 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6374 expand_builtin_lock_release (mode, exp);
6375 return const0_rtx;
6377 case BUILT_IN_SYNCHRONIZE:
6378 expand_builtin_synchronize ();
6379 return const0_rtx;
6381 case BUILT_IN_OBJECT_SIZE:
6382 return expand_builtin_object_size (exp);
6384 case BUILT_IN_MEMCPY_CHK:
6385 case BUILT_IN_MEMPCPY_CHK:
6386 case BUILT_IN_MEMMOVE_CHK:
6387 case BUILT_IN_MEMSET_CHK:
6388 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6389 if (target)
6390 return target;
6391 break;
6393 case BUILT_IN_STRCPY_CHK:
6394 case BUILT_IN_STPCPY_CHK:
6395 case BUILT_IN_STRNCPY_CHK:
6396 case BUILT_IN_STRCAT_CHK:
6397 case BUILT_IN_STRNCAT_CHK:
6398 case BUILT_IN_SNPRINTF_CHK:
6399 case BUILT_IN_VSNPRINTF_CHK:
6400 maybe_emit_chk_warning (exp, fcode);
6401 break;
6403 case BUILT_IN_SPRINTF_CHK:
6404 case BUILT_IN_VSPRINTF_CHK:
6405 maybe_emit_sprintf_chk_warning (exp, fcode);
6406 break;
6408 case BUILT_IN_FREE:
6409 maybe_emit_free_warning (exp);
6410 break;
6412 default: /* just do library call, if unknown builtin */
6413 break;
6416 /* The switch statement above can drop through to cause the function
6417 to be called normally. */
6418 return expand_call (exp, target, ignore);
6421 /* Determine whether a tree node represents a call to a built-in
6422 function. If the tree T is a call to a built-in function with
6423 the right number of arguments of the appropriate types, return
6424 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6425 Otherwise the return value is END_BUILTINS. */
6427 enum built_in_function
6428 builtin_mathfn_code (const_tree t)
6430 const_tree fndecl, arg, parmlist;
6431 const_tree argtype, parmtype;
6432 const_call_expr_arg_iterator iter;
/* Only a direct call through the address of a function can be a
   built-in; calls through pointer variables are rejected at once.  */
6434 if (TREE_CODE (t) != CALL_EXPR
6435 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6436 return END_BUILTINS;
6438 fndecl = get_callee_fndecl (t);
/* Reject non-builtins and machine-dependent (BUILT_IN_MD) builtins,
   whose function codes are target-specific and meaningless here.  */
6439 if (fndecl == NULL_TREE
6440 || TREE_CODE (fndecl) != FUNCTION_DECL
6441 || ! DECL_BUILT_IN (fndecl)
6442 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6443 return END_BUILTINS;
/* Walk the declared parameter types and the actual call arguments in
   parallel, requiring each argument to match its parameter's broad
   type class (scalar float, complex float, pointer, or integral).  */
6445 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6446 init_const_call_expr_arg_iterator (t, &iter);
6447 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6449 /* If a function doesn't take a variable number of arguments,
6450 the last element in the list will have type `void'. */
6451 parmtype = TREE_VALUE (parmlist);
6452 if (VOID_TYPE_P (parmtype))
/* Reached the end of the parameter list: the call must have
   exactly as many arguments as parameters.  */
6454 if (more_const_call_expr_args_p (&iter))
6455 return END_BUILTINS;
6456 return DECL_FUNCTION_CODE (fndecl);
/* Fewer actual arguments than declared parameters.  */
6459 if (! more_const_call_expr_args_p (&iter))
6460 return END_BUILTINS;
6462 arg = next_const_call_expr_arg (&iter);
6463 argtype = TREE_TYPE (arg);
6465 if (SCALAR_FLOAT_TYPE_P (parmtype))
6467 if (! SCALAR_FLOAT_TYPE_P (argtype))
6468 return END_BUILTINS;
6470 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6472 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6473 return END_BUILTINS;
6475 else if (POINTER_TYPE_P (parmtype))
6477 if (! POINTER_TYPE_P (argtype))
6478 return END_BUILTINS;
6480 else if (INTEGRAL_TYPE_P (parmtype))
6482 if (! INTEGRAL_TYPE_P (argtype))
6483 return END_BUILTINS;
/* Parameter of an unexpected type class: give up.  */
6485 else
6486 return END_BUILTINS;
6489 /* Variable-length argument list. */
6490 return DECL_FUNCTION_CODE (fndecl);
6493 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6494 evaluate to a constant. */
6496 static tree
6497 fold_builtin_constant_p (tree arg)
6499 /* We return 1 for a numeric type that's known to be a constant
6500 value at compile-time or for an aggregate type that's a
6501 literal constant. */
6502 STRIP_NOPS (arg);
6504 /* If we know this is a constant, emit the constant of one. */
6505 if (CONSTANT_CLASS_P (arg)
6506 || (TREE_CODE (arg) == CONSTRUCTOR
6507 && TREE_CONSTANT (arg)))
6508 return integer_one_node;
6509 if (TREE_CODE (arg) == ADDR_EXPR)
6511 tree op = TREE_OPERAND (arg, 0);
6512 if (TREE_CODE (op) == STRING_CST
6513 || (TREE_CODE (op) == ARRAY_REF
6514 && integer_zerop (TREE_OPERAND (op, 1))
6515 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6516 return integer_one_node;
6519 /* If this expression has side effects, show we don't know it to be a
6520 constant. Likewise if it's a pointer or aggregate type since in
6521 those case we only want literals, since those are only optimized
6522 when generating RTL, not later.
6523 And finally, if we are compiling an initializer, not code, we
6524 need to return a definite result now; there's not going to be any
6525 more optimization done. */
6526 if (TREE_SIDE_EFFECTS (arg)
6527 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6528 || POINTER_TYPE_P (TREE_TYPE (arg))
6529 || cfun == 0
6530 || folding_initializer)
6531 return integer_zero_node;
6533 return NULL_TREE;
6536 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6537 return it as a truthvalue. */
6539 static tree
6540 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6542 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6544 fn = built_in_decls[BUILT_IN_EXPECT];
6545 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6546 ret_type = TREE_TYPE (TREE_TYPE (fn));
6547 pred_type = TREE_VALUE (arg_types);
6548 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6550 pred = fold_convert_loc (loc, pred_type, pred);
6551 expected = fold_convert_loc (loc, expected_type, expected);
6552 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6554 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6555 build_int_cst (ret_type, 0));
6558 /* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
6559 NULL_TREE if no simplification is possible. */
6561 static tree
6562 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6564 tree inner, fndecl;
6565 enum tree_code code;
6567 /* If this is a builtin_expect within a builtin_expect keep the
6568 inner one.  See through a comparison against a constant.  It
6569 might have been added to create a truthvalue. */
6570 inner = arg0;
6571 if (COMPARISON_CLASS_P (inner)
6572 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6573 inner = TREE_OPERAND (inner, 0);
6575 if (TREE_CODE (inner) == CALL_EXPR
6576 && (fndecl = get_callee_fndecl (inner))
6577 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6578 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6579 return arg0;
6581 /* Distribute the expected value over short-circuiting operators.
6582 See through the cast from truthvalue_type_node to long. */
6583 inner = arg0;
6584 while (TREE_CODE (inner) == NOP_EXPR
6585 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6586 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6587 inner = TREE_OPERAND (inner, 0);
6589 code = TREE_CODE (inner);
/* For A && B or A || B, rebuild as
   __builtin_expect (A, E) <op> __builtin_expect (B, E) so the
   expectation reaches both sub-conditions.  */
6590 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6592 tree op0 = TREE_OPERAND (inner, 0);
6593 tree op1 = TREE_OPERAND (inner, 1);
6595 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6596 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6597 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6599 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6602 /* If the argument isn't invariant then there's nothing else we can do. */
6603 if (!TREE_CONSTANT (arg0))
6604 return NULL_TREE;
6606 /* If we expect that a comparison against the argument will fold to
6607 a constant return the constant.  In practice, this means a true
6608 constant or the address of a non-weak symbol. */
6609 inner = arg0;
6610 STRIP_NOPS (inner);
6611 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip component and array references to reach the underlying decl;
   a weak symbol's address is not known at compile time (it may
   resolve to null), so it must not be treated as a constant.  */
6615 inner = TREE_OPERAND (inner, 0);
6617 while (TREE_CODE (inner) == COMPONENT_REF
6618 || TREE_CODE (inner) == ARRAY_REF);
6619 if ((TREE_CODE (inner) == VAR_DECL
6620 || TREE_CODE (inner) == FUNCTION_DECL)
6621 && DECL_WEAK (inner))
6622 return NULL_TREE;
6625 /* Otherwise, ARG0 already has the proper type for the return value. */
6626 return arg0;
6629 /* Fold a call to __builtin_classify_type with argument ARG. */
6631 static tree
6632 fold_builtin_classify_type (tree arg)
6634 if (arg == 0)
6635 return build_int_cst (NULL_TREE, no_type_class);
6637 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6640 /* Fold a call to __builtin_strlen with argument ARG. */
6642 static tree
6643 fold_builtin_strlen (location_t loc, tree type, tree arg)
6645 if (!validate_arg (arg, POINTER_TYPE))
6646 return NULL_TREE;
6647 else
6649 tree len = c_strlen (arg, 0);
6651 if (len)
6652 return fold_convert_loc (loc, type, len);
6654 return NULL_TREE;
6658 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6660 static tree
6661 fold_builtin_inf (location_t loc, tree type, int warn)
6663 REAL_VALUE_TYPE real;
6665 /* __builtin_inff is intended to be usable to define INFINITY on all
6666 targets. If an infinity is not available, INFINITY expands "to a
6667 positive constant of type float that overflows at translation
6668 time", footnote "In this case, using INFINITY will violate the
6669 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6670 Thus we pedwarn to ensure this constraint violation is
6671 diagnosed. */
6672 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6673 pedwarn (loc, 0, "target format does not support infinity");
6675 real_inf (&real);
6676 return build_real (type, real);
6679 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6681 static tree
6682 fold_builtin_nan (tree arg, tree type, int quiet)
6684 REAL_VALUE_TYPE real;
6685 const char *str;
6687 if (!validate_arg (arg, POINTER_TYPE))
6688 return NULL_TREE;
6689 str = c_getstr (arg);
6690 if (!str)
6691 return NULL_TREE;
6693 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6694 return NULL_TREE;
6696 return build_real (type, real);
6699 /* Return true if the floating point expression T has an integer value.
6700 We also allow +Inf, -Inf and NaN to be considered integer values. */
6702 static bool
6703 integer_valued_real_p (tree t)
6705 switch (TREE_CODE (t))
/* A conversion from an integer is integral by construction.  */
6707 case FLOAT_EXPR:
6708 return true;
/* These preserve the integrality of their operand.  */
6710 case ABS_EXPR:
6711 case SAVE_EXPR:
6712 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* The value of these is the value of the second operand.  */
6714 case COMPOUND_EXPR:
6715 case MODIFY_EXPR:
6716 case BIND_EXPR:
6717 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* These yield an integer value when both operands do.  */
6719 case PLUS_EXPR:
6720 case MINUS_EXPR:
6721 case MULT_EXPR:
6722 case MIN_EXPR:
6723 case MAX_EXPR:
6724 return integer_valued_real_p (TREE_OPERAND (t, 0))
6725 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* A conditional is integer valued if both arms are.  */
6727 case COND_EXPR:
6728 return integer_valued_real_p (TREE_OPERAND (t, 1))
6729 && integer_valued_real_p (TREE_OPERAND (t, 2));
/* For a literal, ask the real-number machinery directly.  */
6731 case REAL_CST:
6732 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* A conversion from an integer type is integral; a conversion from
   another real type is integral if its operand is.  */
6734 case NOP_EXPR:
6736 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6737 if (TREE_CODE (type) == INTEGER_TYPE)
6738 return true;
6739 if (TREE_CODE (type) == REAL_TYPE)
6740 return integer_valued_real_p (TREE_OPERAND (t, 0));
6741 break;
/* Rounding builtins always produce integer values; fmin/fmax do
   when both of their arguments are integer valued.  */
6744 case CALL_EXPR:
6745 switch (builtin_mathfn_code (t))
6747 CASE_FLT_FN (BUILT_IN_CEIL):
6748 CASE_FLT_FN (BUILT_IN_FLOOR):
6749 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6750 CASE_FLT_FN (BUILT_IN_RINT):
6751 CASE_FLT_FN (BUILT_IN_ROUND):
6752 CASE_FLT_FN (BUILT_IN_TRUNC):
6753 return true;
6755 CASE_FLT_FN (BUILT_IN_FMIN):
6756 CASE_FLT_FN (BUILT_IN_FMAX):
6757 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6758 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6760 default:
6761 break;
6763 break;
6765 default:
6766 break;
/* Conservative default: not known to be integer valued.  */
6768 return false;
6771 /* FNDECL is assumed to be a builtin where truncation can be propagated
6772 across (for instance floor((double)f) == (double)floorf (f).
6773 Do the transformation for a call with argument ARG. */
6775 static tree
6776 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6778 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6780 if (!validate_arg (arg, REAL_TYPE))
6781 return NULL_TREE;
6783 /* Integer rounding functions are idempotent. */
6784 if (fcode == builtin_mathfn_code (arg))
6785 return arg;
6787 /* If argument is already integer valued, and we don't need to worry
6788 about setting errno, there's no need to perform rounding. */
6789 if (! flag_errno_math && integer_valued_real_p (arg))
6790 return arg;
6792 if (optimize)
/* If ARG was promoted from a narrower float type, do the rounding
   in the narrower type and widen the result afterwards; e.g.
   floor ((double) f) becomes (double) floorf (f).  */
6794 tree arg0 = strip_float_extensions (arg);
6795 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6796 tree newtype = TREE_TYPE (arg0);
6797 tree decl;
/* Only narrow when a builtin variant exists for the narrower type.  */
6799 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6800 && (decl = mathfn_built_in (newtype, fcode)))
6801 return fold_convert_loc (loc, ftype,
6802 build_call_expr_loc (loc, decl, 1,
6803 fold_convert_loc (loc,
6804 newtype,
6805 arg0)));
6807 return NULL_TREE;
6810 /* FNDECL is assumed to be builtin which can narrow the FP type of
6811 the argument, for instance lround((double)f) -> lroundf (f).
6812 Do the transformation for a call with argument ARG. */
6814 static tree
6815 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6817 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6819 if (!validate_arg (arg, REAL_TYPE))
6820 return NULL_TREE;
6822 /* If argument is already integer valued, and we don't need to worry
6823 about setting errno, there's no need to perform rounding. */
6824 if (! flag_errno_math && integer_valued_real_p (arg))
6825 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6826 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6828 if (optimize)
/* If ARG was promoted from a narrower float type, call the
   narrower-typed variant of the builtin directly.  */
6830 tree ftype = TREE_TYPE (arg);
6831 tree arg0 = strip_float_extensions (arg);
6832 tree newtype = TREE_TYPE (arg0);
6833 tree decl;
6835 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6836 && (decl = mathfn_built_in (newtype, fcode)))
6837 return build_call_expr_loc (loc, decl, 1,
6838 fold_convert_loc (loc, newtype, arg0));
6841 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6842 sizeof (long long) == sizeof (long). */
6843 if (TYPE_PRECISION (long_long_integer_type_node)
6844 == TYPE_PRECISION (long_integer_type_node))
6846 tree newfn = NULL_TREE;
6847 switch (fcode)
6849 CASE_FLT_FN (BUILT_IN_LLCEIL):
6850 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6851 break;
6853 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6854 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6855 break;
6857 CASE_FLT_FN (BUILT_IN_LLROUND):
6858 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6859 break;
6861 CASE_FLT_FN (BUILT_IN_LLRINT):
6862 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6863 break;
6865 default:
6866 break;
6869 if (newfn)
/* Call the "long" variant, then convert back to the original
   "long long" return type (same precision, so value preserving).  */
6871 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6872 return fold_convert_loc (loc,
6873 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6877 return NULL_TREE;
6880 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
6881 return type.  Return NULL_TREE if no simplification can be made. */
6883 static tree
6884 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6886 tree res;
/* The argument must be a complex type whose parts are REAL_TYPE.  */
6888 if (!validate_arg (arg, COMPLEX_TYPE)
6889 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6890 return NULL_TREE;
6892 /* Calculate the result when the argument is a constant. */
6893 if (TREE_CODE (arg) == COMPLEX_CST
6894 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6895 type, mpfr_hypot)))
6896 return res;
6898 if (TREE_CODE (arg) == COMPLEX_EXPR)
6900 tree real = TREE_OPERAND (arg, 0);
6901 tree imag = TREE_OPERAND (arg, 1);
6903 /* If either part is zero, cabs is fabs of the other. */
6904 if (real_zerop (real))
6905 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6906 if (real_zerop (imag))
6907 return fold_build1_loc (loc, ABS_EXPR, type, real);
6909 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6910 if (flag_unsafe_math_optimizations
6911 && operand_equal_p (real, imag, OEP_PURE_SAME))
6913 const REAL_VALUE_TYPE sqrt2_trunc
6914 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ())
6915 STRIP_NOPS (real);
6916 return fold_build2_loc (loc, MULT_EXPR, type,
6917 fold_build1_loc (loc, ABS_EXPR, type, real),
6918 build_real (type, sqrt2_trunc));
6922 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6923 if (TREE_CODE (arg) == NEGATE_EXPR
6924 || TREE_CODE (arg) == CONJ_EXPR)
6925 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6927 /* Don't do this when optimizing for size. */
6928 if (flag_unsafe_math_optimizations
6929 && optimize && optimize_function_for_speed_p (cfun))
/* Expand cabs(z) inline as sqrt (re*re + im*im); builtin_save_expr
   prevents ARG and its parts from being evaluated more than once.  */
6931 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6933 if (sqrtfn != NULL_TREE)
6935 tree rpart, ipart, result;
6937 arg = builtin_save_expr (arg);
6939 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6940 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6942 rpart = builtin_save_expr (rpart);
6943 ipart = builtin_save_expr (ipart);
6945 result = fold_build2_loc (loc, PLUS_EXPR, type,
6946 fold_build2_loc (loc, MULT_EXPR, type,
6947 rpart, rpart),
6948 fold_build2_loc (loc, MULT_EXPR, type,
6949 ipart, ipart));
6951 return build_call_expr_loc (loc, sqrtfn, 1, result);
6955 return NULL_TREE;
6958 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6959 Return NULL_TREE if no simplification can be made. */
6961 static tree
6962 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6965 enum built_in_function fcode;
6966 tree res;
6968 if (!validate_arg (arg, REAL_TYPE))
6969 return NULL_TREE;
6971 /* Calculate the result when the argument is a constant. */
6972 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6973 return res;
6975 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6976 fcode = builtin_mathfn_code (arg);
6977 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
/* Reuse the inner exponential's FUNCTION_DECL, calling it with a
   halved exponent argument.  */
6979 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6980 arg = fold_build2_loc (loc, MULT_EXPR, type,
6981 CALL_EXPR_ARG (arg, 0),
6982 build_real (type, dconsthalf));
6983 return build_call_expr_loc (loc, expfn, 1, arg);
6986 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6987 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6989 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6991 if (powfn)
6993 tree arg0 = CALL_EXPR_ARG (arg, 0);
6994 tree tree_root;
6995 /* The inner root was either sqrt or cbrt. */
6996 /* This was a conditional expression but it triggered a bug
6997 in Sun C 5.5. */
6998 REAL_VALUE_TYPE dconstroot;
6999 if (BUILTIN_SQRT_P (fcode))
7000 dconstroot = dconsthalf;
7001 else
7002 dconstroot = dconst_third ();
7004 /* Adjust for the outer root. */
/* Decrementing the binary exponent halves the value:
   1/2 -> 1/4 for sqrt, 1/3 -> 1/6 for cbrt.  */
7005 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7006 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7007 tree_root = build_real (type, dconstroot);
7008 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7012 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7013 if (flag_unsafe_math_optimizations
7014 && (fcode == BUILT_IN_POW
7015 || fcode == BUILT_IN_POWF
7016 || fcode == BUILT_IN_POWL))
7018 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7019 tree arg0 = CALL_EXPR_ARG (arg, 0);
7020 tree arg1 = CALL_EXPR_ARG (arg, 1);
7021 tree narg1;
/* Take |x| unless x is known nonnegative: pow's base may be
   negative (for integer y), while sqrt's result is nonnegative.  */
7022 if (!tree_expr_nonnegative_p (arg0))
7023 arg0 = build1 (ABS_EXPR, type, arg0);
7024 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7025 build_real (type, dconsthalf));
7026 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7029 return NULL_TREE;
7032 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7033 Return NULL_TREE if no simplification can be made. */
7035 static tree
7036 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7038 const enum built_in_function fcode = builtin_mathfn_code (arg);
7039 tree res;
7041 if (!validate_arg (arg, REAL_TYPE))
7042 return NULL_TREE;
7044 /* Calculate the result when the argument is a constant. */
7045 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7046 return res;
7048 if (flag_unsafe_math_optimizations)
7050 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7051 if (BUILTIN_EXPONENT_P (fcode))
7053 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7054 const REAL_VALUE_TYPE third_trunc =
7055 real_value_truncate (TYPE_MODE (type), dconst_third ());
7056 arg = fold_build2_loc (loc, MULT_EXPR, type,
7057 CALL_EXPR_ARG (arg, 0),
7058 build_real (type, third_trunc));
7059 return build_call_expr_loc (loc, expfn, 1, arg);
7062 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7063 if (BUILTIN_SQRT_P (fcode))
7065 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7067 if (powfn)
7069 tree arg0 = CALL_EXPR_ARG (arg, 0);
7070 tree tree_root;
7071 REAL_VALUE_TYPE dconstroot = dconst_third ();
7073 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7074 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7075 tree_root = build_real (type, dconstroot);
7076 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7080 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7081 if (BUILTIN_CBRT_P (fcode))
7083 tree arg0 = CALL_EXPR_ARG (arg, 0);
7084 if (tree_expr_nonnegative_p (arg0))
7086 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7088 if (powfn)
7090 tree tree_root;
7091 REAL_VALUE_TYPE dconstroot;
7093 real_arithmetic (&dconstroot, MULT_EXPR,
7094 dconst_third_ptr (), dconst_third_ptr ());
7095 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7096 tree_root = build_real (type, dconstroot);
7097 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7102 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7103 if (fcode == BUILT_IN_POW
7104 || fcode == BUILT_IN_POWF
7105 || fcode == BUILT_IN_POWL)
7107 tree arg00 = CALL_EXPR_ARG (arg, 0);
7108 tree arg01 = CALL_EXPR_ARG (arg, 1);
7109 if (tree_expr_nonnegative_p (arg00))
7111 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7112 const REAL_VALUE_TYPE dconstroot
7113 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7114 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7115 build_real (type, dconstroot));
7116 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7120 return NULL_TREE;
7123 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7124 TYPE is the type of the return value. Return NULL_TREE if no
7125 simplification can be made. */
7127 static tree
7128 fold_builtin_cos (location_t loc,
7129 tree arg, tree type, tree fndecl)
7131 tree res, narg;
7133 if (!validate_arg (arg, REAL_TYPE))
7134 return NULL_TREE;
7136 /* Calculate the result when the argument is a constant. */
7137 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7138 return res;
7140 /* Optimize cos(-x) into cos (x). */
7141 if ((narg = fold_strip_sign_ops (arg)))
7142 return build_call_expr_loc (loc, fndecl, 1, narg);
7144 return NULL_TREE;
7147 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7148 Return NULL_TREE if no simplification can be made. */
7150 static tree
7151 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7153 if (validate_arg (arg, REAL_TYPE))
7155 tree res, narg;
7157 /* Calculate the result when the argument is a constant. */
7158 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7159 return res;
7161 /* Optimize cosh(-x) into cosh (x). */
7162 if ((narg = fold_strip_sign_ops (arg)))
7163 return build_call_expr_loc (loc, fndecl, 1, narg);
7166 return NULL_TREE;
7169 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7170 argument ARG. TYPE is the type of the return value. Return
7171 NULL_TREE if no simplification can be made. */
7173 static tree
7174 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7175 bool hyper)
7177 if (validate_arg (arg, COMPLEX_TYPE)
7178 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7180 tree tmp;
7182 /* Calculate the result when the argument is a constant. */
7183 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7184 return tmp;
7186 /* Optimize fn(-x) into fn(x). */
7187 if ((tmp = fold_strip_sign_ops (arg)))
7188 return build_call_expr_loc (loc, fndecl, 1, tmp);
7191 return NULL_TREE;
7194 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7195 Return NULL_TREE if no simplification can be made. */
7197 static tree
7198 fold_builtin_tan (tree arg, tree type)
7200 enum built_in_function fcode;
7201 tree res;
7203 if (!validate_arg (arg, REAL_TYPE))
7204 return NULL_TREE;
7206 /* Calculate the result when the argument is a constant. */
7207 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7208 return res;
7210 /* Optimize tan(atan(x)) = x. */
7211 fcode = builtin_mathfn_code (arg);
7212 if (flag_unsafe_math_optimizations
7213 && (fcode == BUILT_IN_ATAN
7214 || fcode == BUILT_IN_ATANF
7215 || fcode == BUILT_IN_ATANL))
7216 return CALL_EXPR_ARG (arg, 0);
7218 return NULL_TREE;
7221 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7222 NULL_TREE if no simplification can be made. */
7224 static tree
7225 fold_builtin_sincos (location_t loc,
7226 tree arg0, tree arg1, tree arg2)
7228 tree type;
7229 tree res, fn, call;
7231 if (!validate_arg (arg0, REAL_TYPE)
7232 || !validate_arg (arg1, POINTER_TYPE)
7233 || !validate_arg (arg2, POINTER_TYPE))
7234 return NULL_TREE;
7236 type = TREE_TYPE (arg0);
7238 /* Calculate the result when the argument is a constant. */
7239 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7240 return res;
7242 /* Canonicalize sincos to cexpi. */
7243 if (!TARGET_C99_FUNCTIONS)
7244 return NULL_TREE;
7245 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7246 if (!fn)
7247 return NULL_TREE;
7249 call = build_call_expr_loc (loc, fn, 1, arg0);
7250 call = builtin_save_expr (call);
7252 return build2 (COMPOUND_EXPR, void_type_node,
7253 build2 (MODIFY_EXPR, void_type_node,
7254 build_fold_indirect_ref_loc (loc, arg1),
7255 build1 (IMAGPART_EXPR, type, call)),
7256 build2 (MODIFY_EXPR, void_type_node,
7257 build_fold_indirect_ref_loc (loc, arg2),
7258 build1 (REALPART_EXPR, type, call)));
7261 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7262 NULL_TREE if no simplification can be made. */
7264 static tree
7265 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7267 tree rtype;
7268 tree realp, imagp, ifn;
7269 tree res;
7271 if (!validate_arg (arg0, COMPLEX_TYPE)
7272 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7273 return NULL_TREE;
7275 /* Calculate the result when the argument is a constant. */
7276 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7277 return res;
7279 rtype = TREE_TYPE (TREE_TYPE (arg0));
7281 /* In case we can figure out the real part of arg0 and it is constant zero
7282 fold to cexpi. */
7283 if (!TARGET_C99_FUNCTIONS)
7284 return NULL_TREE;
7285 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7286 if (!ifn)
7287 return NULL_TREE;
7289 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7290 && real_zerop (realp))
7292 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7293 return build_call_expr_loc (loc, ifn, 1, narg);
7296 /* In case we can easily decompose real and imaginary parts split cexp
7297 to exp (r) * cexpi (i). */
7298 if (flag_unsafe_math_optimizations
7299 && realp)
7301 tree rfn, rcall, icall;
7303 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7304 if (!rfn)
7305 return NULL_TREE;
7307 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7308 if (!imagp)
7309 return NULL_TREE;
7311 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7312 icall = builtin_save_expr (icall);
7313 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7314 rcall = builtin_save_expr (rcall);
7315 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7316 fold_build2_loc (loc, MULT_EXPR, rtype,
7317 rcall,
7318 fold_build1_loc (loc, REALPART_EXPR,
7319 rtype, icall)),
7320 fold_build2_loc (loc, MULT_EXPR, rtype,
7321 rcall,
7322 fold_build1_loc (loc, IMAGPART_EXPR,
7323 rtype, icall)));
7326 return NULL_TREE;
7329 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7330 Return NULL_TREE if no simplification can be made. */
7332 static tree
7333 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7335 if (!validate_arg (arg, REAL_TYPE))
7336 return NULL_TREE;
7338 /* Optimize trunc of constant value. */
7339 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7341 REAL_VALUE_TYPE r, x;
7342 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7344 x = TREE_REAL_CST (arg);
7345 real_trunc (&r, TYPE_MODE (type), &x);
7346 return build_real (type, r);
7349 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7352 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7353 Return NULL_TREE if no simplification can be made. */
7355 static tree
7356 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7358 if (!validate_arg (arg, REAL_TYPE))
7359 return NULL_TREE;
7361 /* Optimize floor of constant value. */
7362 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7364 REAL_VALUE_TYPE x;
7366 x = TREE_REAL_CST (arg);
7367 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7369 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7370 REAL_VALUE_TYPE r;
7372 real_floor (&r, TYPE_MODE (type), &x);
7373 return build_real (type, r);
7377 /* Fold floor (x) where x is nonnegative to trunc (x). */
7378 if (tree_expr_nonnegative_p (arg))
7380 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7381 if (truncfn)
7382 return build_call_expr_loc (loc, truncfn, 1, arg);
7385 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7388 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7389 Return NULL_TREE if no simplification can be made. */
7391 static tree
7392 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7394 if (!validate_arg (arg, REAL_TYPE))
7395 return NULL_TREE;
7397 /* Optimize ceil of constant value. */
7398 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7400 REAL_VALUE_TYPE x;
7402 x = TREE_REAL_CST (arg);
7403 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7405 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7406 REAL_VALUE_TYPE r;
7408 real_ceil (&r, TYPE_MODE (type), &x);
7409 return build_real (type, r);
7413 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7416 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7417 Return NULL_TREE if no simplification can be made. */
7419 static tree
7420 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7422 if (!validate_arg (arg, REAL_TYPE))
7423 return NULL_TREE;
7425 /* Optimize round of constant value. */
7426 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7428 REAL_VALUE_TYPE x;
7430 x = TREE_REAL_CST (arg);
7431 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7433 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7434 REAL_VALUE_TYPE r;
7436 real_round (&r, TYPE_MODE (type), &x);
7437 return build_real (type, r);
7441 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7444 /* Fold function call to builtin lround, lroundf or lroundl (or the
7445 corresponding long long versions) and other rounding functions. ARG
7446 is the argument to the call. Return NULL_TREE if no simplification
7447 can be made. */
7449 static tree
7450 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7452 if (!validate_arg (arg, REAL_TYPE))
7453 return NULL_TREE;
7455 /* Optimize lround of constant value. */
7456 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7458 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7460 if (real_isfinite (&x))
7462 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7463 tree ftype = TREE_TYPE (arg);
7464 unsigned HOST_WIDE_INT lo2;
7465 HOST_WIDE_INT hi, lo;
7466 REAL_VALUE_TYPE r;
7468 switch (DECL_FUNCTION_CODE (fndecl))
7470 CASE_FLT_FN (BUILT_IN_LFLOOR):
7471 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7472 real_floor (&r, TYPE_MODE (ftype), &x);
7473 break;
7475 CASE_FLT_FN (BUILT_IN_LCEIL):
7476 CASE_FLT_FN (BUILT_IN_LLCEIL):
7477 real_ceil (&r, TYPE_MODE (ftype), &x);
7478 break;
7480 CASE_FLT_FN (BUILT_IN_LROUND):
7481 CASE_FLT_FN (BUILT_IN_LLROUND):
7482 real_round (&r, TYPE_MODE (ftype), &x);
7483 break;
7485 default:
7486 gcc_unreachable ();
7489 REAL_VALUE_TO_INT (&lo, &hi, r);
7490 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7491 return build_int_cst_wide (itype, lo2, hi);
7495 switch (DECL_FUNCTION_CODE (fndecl))
7497 CASE_FLT_FN (BUILT_IN_LFLOOR):
7498 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7499 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7500 if (tree_expr_nonnegative_p (arg))
7501 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7502 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7503 break;
7504 default:;
7507 return fold_fixed_mathfn (loc, fndecl, arg);
7510 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7511 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7512 the argument to the call. Return NULL_TREE if no simplification can
7513 be made. */
7515 static tree
7516 fold_builtin_bitop (tree fndecl, tree arg)
7518 if (!validate_arg (arg, INTEGER_TYPE))
7519 return NULL_TREE;
7521 /* Optimize for constant argument. */
7522 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7524 HOST_WIDE_INT hi, width, result;
7525 unsigned HOST_WIDE_INT lo;
7526 tree type;
7528 type = TREE_TYPE (arg);
7529 width = TYPE_PRECISION (type);
7530 lo = TREE_INT_CST_LOW (arg);
7532 /* Clear all the bits that are beyond the type's precision. */
7533 if (width > HOST_BITS_PER_WIDE_INT)
7535 hi = TREE_INT_CST_HIGH (arg);
7536 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7537 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7539 else
7541 hi = 0;
7542 if (width < HOST_BITS_PER_WIDE_INT)
7543 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7546 switch (DECL_FUNCTION_CODE (fndecl))
7548 CASE_INT_FN (BUILT_IN_FFS):
7549 if (lo != 0)
7550 result = exact_log2 (lo & -lo) + 1;
7551 else if (hi != 0)
7552 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7553 else
7554 result = 0;
7555 break;
7557 CASE_INT_FN (BUILT_IN_CLZ):
7558 if (hi != 0)
7559 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7560 else if (lo != 0)
7561 result = width - floor_log2 (lo) - 1;
7562 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7563 result = width;
7564 break;
7566 CASE_INT_FN (BUILT_IN_CTZ):
7567 if (lo != 0)
7568 result = exact_log2 (lo & -lo);
7569 else if (hi != 0)
7570 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7571 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7572 result = width;
7573 break;
7575 CASE_INT_FN (BUILT_IN_POPCOUNT):
7576 result = 0;
7577 while (lo)
7578 result++, lo &= lo - 1;
7579 while (hi)
7580 result++, hi &= hi - 1;
7581 break;
7583 CASE_INT_FN (BUILT_IN_PARITY):
7584 result = 0;
7585 while (lo)
7586 result++, lo &= lo - 1;
7587 while (hi)
7588 result++, hi &= hi - 1;
7589 result &= 1;
7590 break;
7592 default:
7593 gcc_unreachable ();
7596 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7599 return NULL_TREE;
7602 /* Fold function call to builtin_bswap and the long and long long
7603 variants. Return NULL_TREE if no simplification can be made. */
7604 static tree
7605 fold_builtin_bswap (tree fndecl, tree arg)
7607 if (! validate_arg (arg, INTEGER_TYPE))
7608 return NULL_TREE;
7610 /* Optimize constant value. */
7611 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7613 HOST_WIDE_INT hi, width, r_hi = 0;
7614 unsigned HOST_WIDE_INT lo, r_lo = 0;
7615 tree type;
7617 type = TREE_TYPE (arg);
7618 width = TYPE_PRECISION (type);
7619 lo = TREE_INT_CST_LOW (arg);
7620 hi = TREE_INT_CST_HIGH (arg);
7622 switch (DECL_FUNCTION_CODE (fndecl))
7624 case BUILT_IN_BSWAP32:
7625 case BUILT_IN_BSWAP64:
7627 int s;
7629 for (s = 0; s < width; s += 8)
7631 int d = width - s - 8;
7632 unsigned HOST_WIDE_INT byte;
7634 if (s < HOST_BITS_PER_WIDE_INT)
7635 byte = (lo >> s) & 0xff;
7636 else
7637 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7639 if (d < HOST_BITS_PER_WIDE_INT)
7640 r_lo |= byte << d;
7641 else
7642 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7646 break;
7648 default:
7649 gcc_unreachable ();
7652 if (width < HOST_BITS_PER_WIDE_INT)
7653 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7654 else
7655 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7658 return NULL_TREE;
7661 /* A subroutine of fold_builtin to fold the various logarithmic
7662 functions. Return NULL_TREE if no simplification can me made.
7663 FUNC is the corresponding MPFR logarithm function. */
7665 static tree
7666 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7667 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7669 if (validate_arg (arg, REAL_TYPE))
7671 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7672 tree res;
7673 const enum built_in_function fcode = builtin_mathfn_code (arg);
7675 /* Calculate the result when the argument is a constant. */
7676 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7677 return res;
7679 /* Special case, optimize logN(expN(x)) = x. */
7680 if (flag_unsafe_math_optimizations
7681 && ((func == mpfr_log
7682 && (fcode == BUILT_IN_EXP
7683 || fcode == BUILT_IN_EXPF
7684 || fcode == BUILT_IN_EXPL))
7685 || (func == mpfr_log2
7686 && (fcode == BUILT_IN_EXP2
7687 || fcode == BUILT_IN_EXP2F
7688 || fcode == BUILT_IN_EXP2L))
7689 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7690 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7692 /* Optimize logN(func()) for various exponential functions. We
7693 want to determine the value "x" and the power "exponent" in
7694 order to transform logN(x**exponent) into exponent*logN(x). */
7695 if (flag_unsafe_math_optimizations)
7697 tree exponent = 0, x = 0;
7699 switch (fcode)
7701 CASE_FLT_FN (BUILT_IN_EXP):
7702 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7703 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7704 dconst_e ()));
7705 exponent = CALL_EXPR_ARG (arg, 0);
7706 break;
7707 CASE_FLT_FN (BUILT_IN_EXP2):
7708 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7709 x = build_real (type, dconst2);
7710 exponent = CALL_EXPR_ARG (arg, 0);
7711 break;
7712 CASE_FLT_FN (BUILT_IN_EXP10):
7713 CASE_FLT_FN (BUILT_IN_POW10):
7714 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7716 REAL_VALUE_TYPE dconst10;
7717 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7718 x = build_real (type, dconst10);
7720 exponent = CALL_EXPR_ARG (arg, 0);
7721 break;
7722 CASE_FLT_FN (BUILT_IN_SQRT):
7723 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7724 x = CALL_EXPR_ARG (arg, 0);
7725 exponent = build_real (type, dconsthalf);
7726 break;
7727 CASE_FLT_FN (BUILT_IN_CBRT):
7728 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7729 x = CALL_EXPR_ARG (arg, 0);
7730 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7731 dconst_third ()));
7732 break;
7733 CASE_FLT_FN (BUILT_IN_POW):
7734 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7735 x = CALL_EXPR_ARG (arg, 0);
7736 exponent = CALL_EXPR_ARG (arg, 1);
7737 break;
7738 default:
7739 break;
7742 /* Now perform the optimization. */
7743 if (x && exponent)
7745 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7746 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7751 return NULL_TREE;
7754 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7755 NULL_TREE if no simplification can be made. */
7757 static tree
7758 fold_builtin_hypot (location_t loc, tree fndecl,
7759 tree arg0, tree arg1, tree type)
7761 tree res, narg0, narg1;
7763 if (!validate_arg (arg0, REAL_TYPE)
7764 || !validate_arg (arg1, REAL_TYPE))
7765 return NULL_TREE;
7767 /* Calculate the result when the argument is a constant. */
7768 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7769 return res;
7771 /* If either argument to hypot has a negate or abs, strip that off.
7772 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7773 narg0 = fold_strip_sign_ops (arg0);
7774 narg1 = fold_strip_sign_ops (arg1);
7775 if (narg0 || narg1)
7777 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7778 narg1 ? narg1 : arg1);
7781 /* If either argument is zero, hypot is fabs of the other. */
7782 if (real_zerop (arg0))
7783 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7784 else if (real_zerop (arg1))
7785 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7787 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7788 if (flag_unsafe_math_optimizations
7789 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7791 const REAL_VALUE_TYPE sqrt2_trunc
7792 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7793 return fold_build2_loc (loc, MULT_EXPR, type,
7794 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7795 build_real (type, sqrt2_trunc));
7798 return NULL_TREE;
7802 /* Fold a builtin function call to pow, powf, or powl. Return
7803 NULL_TREE if no simplification can be made. */
7804 static tree
7805 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7807 tree res;
7809 if (!validate_arg (arg0, REAL_TYPE)
7810 || !validate_arg (arg1, REAL_TYPE))
7811 return NULL_TREE;
7813 /* Calculate the result when the argument is a constant. */
7814 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7815 return res;
7817 /* Optimize pow(1.0,y) = 1.0. */
7818 if (real_onep (arg0))
7819 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7821 if (TREE_CODE (arg1) == REAL_CST
7822 && !TREE_OVERFLOW (arg1))
7824 REAL_VALUE_TYPE cint;
7825 REAL_VALUE_TYPE c;
7826 HOST_WIDE_INT n;
7828 c = TREE_REAL_CST (arg1);
7830 /* Optimize pow(x,0.0) = 1.0. */
7831 if (REAL_VALUES_EQUAL (c, dconst0))
7832 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7833 arg0);
7835 /* Optimize pow(x,1.0) = x. */
7836 if (REAL_VALUES_EQUAL (c, dconst1))
7837 return arg0;
7839 /* Optimize pow(x,-1.0) = 1.0/x. */
7840 if (REAL_VALUES_EQUAL (c, dconstm1))
7841 return fold_build2_loc (loc, RDIV_EXPR, type,
7842 build_real (type, dconst1), arg0);
7844 /* Optimize pow(x,0.5) = sqrt(x). */
7845 if (flag_unsafe_math_optimizations
7846 && REAL_VALUES_EQUAL (c, dconsthalf))
7848 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7850 if (sqrtfn != NULL_TREE)
7851 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7854 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7855 if (flag_unsafe_math_optimizations)
7857 const REAL_VALUE_TYPE dconstroot
7858 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7860 if (REAL_VALUES_EQUAL (c, dconstroot))
7862 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7863 if (cbrtfn != NULL_TREE)
7864 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7868 /* Check for an integer exponent. */
7869 n = real_to_integer (&c);
7870 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7871 if (real_identical (&c, &cint))
7873 /* Attempt to evaluate pow at compile-time, unless this should
7874 raise an exception. */
7875 if (TREE_CODE (arg0) == REAL_CST
7876 && !TREE_OVERFLOW (arg0)
7877 && (n > 0
7878 || (!flag_trapping_math && !flag_errno_math)
7879 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7881 REAL_VALUE_TYPE x;
7882 bool inexact;
7884 x = TREE_REAL_CST (arg0);
7885 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7886 if (flag_unsafe_math_optimizations || !inexact)
7887 return build_real (type, x);
7890 /* Strip sign ops from even integer powers. */
7891 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7893 tree narg0 = fold_strip_sign_ops (arg0);
7894 if (narg0)
7895 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7900 if (flag_unsafe_math_optimizations)
7902 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7904 /* Optimize pow(expN(x),y) = expN(x*y). */
7905 if (BUILTIN_EXPONENT_P (fcode))
7907 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7908 tree arg = CALL_EXPR_ARG (arg0, 0);
7909 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7910 return build_call_expr_loc (loc, expfn, 1, arg);
7913 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7914 if (BUILTIN_SQRT_P (fcode))
7916 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7917 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7918 build_real (type, dconsthalf));
7919 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7922 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7923 if (BUILTIN_CBRT_P (fcode))
7925 tree arg = CALL_EXPR_ARG (arg0, 0);
7926 if (tree_expr_nonnegative_p (arg))
7928 const REAL_VALUE_TYPE dconstroot
7929 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7930 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7931 build_real (type, dconstroot));
7932 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7936 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7937 if (fcode == BUILT_IN_POW
7938 || fcode == BUILT_IN_POWF
7939 || fcode == BUILT_IN_POWL)
7941 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7942 if (tree_expr_nonnegative_p (arg00))
7944 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7945 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7946 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7951 return NULL_TREE;
7954 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7955 Return NULL_TREE if no simplification can be made. */
7956 static tree
7957 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7958 tree arg0, tree arg1, tree type)
7960 if (!validate_arg (arg0, REAL_TYPE)
7961 || !validate_arg (arg1, INTEGER_TYPE))
7962 return NULL_TREE;
7964 /* Optimize pow(1.0,y) = 1.0. */
7965 if (real_onep (arg0))
7966 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7968 if (host_integerp (arg1, 0))
7970 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7972 /* Evaluate powi at compile-time. */
7973 if (TREE_CODE (arg0) == REAL_CST
7974 && !TREE_OVERFLOW (arg0))
7976 REAL_VALUE_TYPE x;
7977 x = TREE_REAL_CST (arg0);
7978 real_powi (&x, TYPE_MODE (type), &x, c);
7979 return build_real (type, x);
7982 /* Optimize pow(x,0) = 1.0. */
7983 if (c == 0)
7984 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7985 arg0);
7987 /* Optimize pow(x,1) = x. */
7988 if (c == 1)
7989 return arg0;
7991 /* Optimize pow(x,-1) = 1.0/x. */
7992 if (c == -1)
7993 return fold_build2_loc (loc, RDIV_EXPR, type,
7994 build_real (type, dconst1), arg0);
7997 return NULL_TREE;
8000 /* A subroutine of fold_builtin to fold the various exponent
8001 functions. Return NULL_TREE if no simplification can be made.
8002 FUNC is the corresponding MPFR exponent function. */
8004 static tree
8005 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8006 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8008 if (validate_arg (arg, REAL_TYPE))
8010 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8011 tree res;
8013 /* Calculate the result when the argument is a constant. */
8014 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8015 return res;
8017 /* Optimize expN(logN(x)) = x. */
8018 if (flag_unsafe_math_optimizations)
8020 const enum built_in_function fcode = builtin_mathfn_code (arg);
8022 if ((func == mpfr_exp
8023 && (fcode == BUILT_IN_LOG
8024 || fcode == BUILT_IN_LOGF
8025 || fcode == BUILT_IN_LOGL))
8026 || (func == mpfr_exp2
8027 && (fcode == BUILT_IN_LOG2
8028 || fcode == BUILT_IN_LOG2F
8029 || fcode == BUILT_IN_LOG2L))
8030 || (func == mpfr_exp10
8031 && (fcode == BUILT_IN_LOG10
8032 || fcode == BUILT_IN_LOG10F
8033 || fcode == BUILT_IN_LOG10L)))
8034 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8038 return NULL_TREE;
8041 /* Return true if VAR is a VAR_DECL or a component thereof. */
8043 static bool
8044 var_decl_component_p (tree var)
8046 tree inner = var;
8047 while (handled_component_p (inner))
8048 inner = TREE_OPERAND (inner, 0);
8049 return SSA_VAR_P (inner);
8052 /* Fold function call to builtin memset. Return
8053 NULL_TREE if no simplification can be made. */
8055 static tree
8056 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8057 tree type, bool ignore)
8059 tree var, ret, etype;
8060 unsigned HOST_WIDE_INT length, cval;
8062 if (! validate_arg (dest, POINTER_TYPE)
8063 || ! validate_arg (c, INTEGER_TYPE)
8064 || ! validate_arg (len, INTEGER_TYPE))
8065 return NULL_TREE;
8067 if (! host_integerp (len, 1))
8068 return NULL_TREE;
8070 /* If the LEN parameter is zero, return DEST. */
8071 if (integer_zerop (len))
8072 return omit_one_operand_loc (loc, type, dest, c);
8074 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8075 return NULL_TREE;
8077 var = dest;
8078 STRIP_NOPS (var);
8079 if (TREE_CODE (var) != ADDR_EXPR)
8080 return NULL_TREE;
8082 var = TREE_OPERAND (var, 0);
8083 if (TREE_THIS_VOLATILE (var))
8084 return NULL_TREE;
8086 etype = TREE_TYPE (var);
8087 if (TREE_CODE (etype) == ARRAY_TYPE)
8088 etype = TREE_TYPE (etype);
8090 if (!INTEGRAL_TYPE_P (etype)
8091 && !POINTER_TYPE_P (etype))
8092 return NULL_TREE;
8094 if (! var_decl_component_p (var))
8095 return NULL_TREE;
8097 length = tree_low_cst (len, 1);
8098 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8099 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8100 < (int) length)
8101 return NULL_TREE;
8103 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8104 return NULL_TREE;
8106 if (integer_zerop (c))
8107 cval = 0;
8108 else
8110 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8111 return NULL_TREE;
8113 cval = tree_low_cst (c, 1);
8114 cval &= 0xff;
8115 cval |= cval << 8;
8116 cval |= cval << 16;
8117 cval |= (cval << 31) << 1;
8120 ret = build_int_cst_type (etype, cval);
8121 var = build_fold_indirect_ref_loc (loc,
8122 fold_convert_loc (loc,
8123 build_pointer_type (etype),
8124 dest));
8125 ret = build2 (MODIFY_EXPR, etype, var, ret);
8126 if (ignore)
8127 return ret;
8129 return omit_one_operand_loc (loc, type, dest, ret);
8132 /* Fold function call to builtin memset. Return
8133 NULL_TREE if no simplification can be made. */
8135 static tree
8136 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8138 if (! validate_arg (dest, POINTER_TYPE)
8139 || ! validate_arg (size, INTEGER_TYPE))
8140 return NULL_TREE;
8142 if (!ignore)
8143 return NULL_TREE;
8145 /* New argument list transforming bzero(ptr x, int y) to
8146 memset(ptr x, int 0, size_t y). This is done this way
8147 so that if it isn't expanded inline, we fallback to
8148 calling bzero instead of memset. */
8150 return fold_builtin_memset (loc, dest, integer_zero_node,
8151 fold_convert_loc (loc, sizetype, size),
8152 void_type_node, ignore);
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      int src_align, dest_align;

      if (endp == 3)
	{
	  /* memmove: try to prove non-overlap so it can become memcpy.  */
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (srcvar
	      && !TREE_THIS_VOLATILE (srcvar)
	      && destvar
	      && !TREE_THIS_VOLATILE (destvar))
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      /* Decompose both references into base + bit-offset.  */
	      src_base = srcvar;
	      if (handled_component_p (src_base))
		src_base = get_ref_base_and_extent (src_base, &src_offset,
						    &size, &maxsize);
	      dest_base = destvar;
	      if (handled_component_p (dest_base))
		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
						     &size, &maxsize);
	      /* Use LEN (in bits) as the access size when it is constant,
	         guarding against HOST_WIDE_INT overflow.  */
	      if (host_integerp (len, 1))
		{
		  maxsize = tree_low_cst (len, 1);
		  if (maxsize
		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
		    maxsize = -1;
		  else
		    maxsize *= BITS_PER_UNIT;
		}
	      else
		maxsize = -1;
	      /* Two declarations overlap only when they are the same
	         declaration with overlapping ranges.  */
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      /* Two indirect references overlap when the pointers
	         differ or the ranges overlap; anything else is
	         conservatively assumed to overlap.  */
	      else if (TREE_CODE (src_base) == INDIRECT_REF
		       && TREE_CODE (dest_base) == INDIRECT_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0)
		      || ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }
	  return NULL_TREE;
	}

      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
         This logic lose for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* If SRC points to an array whose size differs from LEN, copy
         at element granularity instead.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      /* Likewise for DEST.  */
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Give up without complete, constant-sized, non-volatile types
         on both sides.  */
      if (!srctype || !desttype
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
	  || TYPE_VOLATILE (srctype)
	  || TYPE_VOLATILE (desttype))
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < (int) TYPE_ALIGN (desttype)
	  || src_align < (int) TYPE_ALIGN (srctype))
	return NULL_TREE;

      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build *SRC as an rvalue when the whole object is copied.  */
      srcvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	  if (TREE_THIS_VOLATILE (srcvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
	    srcvar = NULL_TREE;
	  /* With memcpy, it is possible to bypass aliasing rules, so without
	     this check i.e. execute/20060930-2.c would be misoptimized,
	     because it use conflicting alias set to hold argument for the
	     memcpy call.  This check is probably unnecessary with
	     -fno-strict-aliasing.  Similarly for destvar.  See also
	     PR29286.  */
	  else if (!var_decl_component_p (srcvar))
	    srcvar = NULL_TREE;
	}

      /* Likewise build *DEST as an lvalue.  */
      destvar = NULL_TREE;
      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	  if (TREE_THIS_VOLATILE (destvar))
	    return NULL_TREE;
	  else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
	    destvar = NULL_TREE;
	  else if (!var_decl_component_p (destvar))
	    destvar = NULL_TREE;
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side is usable, re-derive the other from it,
         possibly through an unaligned packed variant type.  */
      if (srcvar == NULL_TREE)
	{
	  tree srcptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
	    return NULL_TREE;

	  srctype = build_qualified_type (desttype, 0);
	  if (src_align < (int) TYPE_ALIGN (srctype))
	    {
	      if (AGGREGATE_TYPE_P (srctype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
		return NULL_TREE;

	      srctype = build_variant_type_copy (srctype);
	      TYPE_ALIGN (srctype) = src_align;
	      TYPE_USER_ALIGN (srctype) = 1;
	      TYPE_PACKED (srctype) = 1;
	    }
	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
	  src = fold_convert_loc (loc, srcptype, src);
	  srcvar = build_fold_indirect_ref_loc (loc, src);
	}
      else if (destvar == NULL_TREE)
	{
	  tree destptype;
	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
	    return NULL_TREE;

	  desttype = build_qualified_type (srctype, 0);
	  if (dest_align < (int) TYPE_ALIGN (desttype))
	    {
	      if (AGGREGATE_TYPE_P (desttype)
		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
		return NULL_TREE;

	      desttype = build_variant_type_copy (desttype);
	      TYPE_ALIGN (desttype) = dest_align;
	      TYPE_USER_ALIGN (desttype) = 1;
	      TYPE_PACKED (desttype) = 1;
	    }
	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
	  dest = fold_convert_loc (loc, destptype, dest);
	  destvar = build_fold_indirect_ref_loc (loc, dest);
	}

      /* Express the copy as a plain assignment *DEST = *SRC, converting
         or view-converting the source value as needed.  */
      if (srctype == desttype
	  || (gimple_in_ssa_p (cfun)
	      && useless_type_conversion_p (desttype, srctype)))
	expr = srcvar;
      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
	expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
      else
	expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				TREE_TYPE (destvar), srcvar);
      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
    }

  if (ignore)
    return expr;

  /* memcpy and memmove return DEST.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy returns DEST + LEN - 1.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8443 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8444 If LEN is not NULL, it represents the length of the string to be
8445 copied. Return NULL_TREE if no simplification can be made. */
8447 tree
8448 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8450 tree fn;
8452 if (!validate_arg (dest, POINTER_TYPE)
8453 || !validate_arg (src, POINTER_TYPE))
8454 return NULL_TREE;
8456 /* If SRC and DEST are the same (and not volatile), return DEST. */
8457 if (operand_equal_p (src, dest, 0))
8458 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8460 if (optimize_function_for_size_p (cfun))
8461 return NULL_TREE;
8463 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8464 if (!fn)
8465 return NULL_TREE;
8467 if (!len)
8469 len = c_strlen (src, 1);
8470 if (! len || TREE_SIDE_EFFECTS (len))
8471 return NULL_TREE;
8474 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8475 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8476 build_call_expr_loc (loc, fn, 3, dest, src, len));
8479 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8480 Return NULL_TREE if no simplification can be made. */
8482 static tree
8483 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8485 tree fn, len, lenp1, call, type;
8487 if (!validate_arg (dest, POINTER_TYPE)
8488 || !validate_arg (src, POINTER_TYPE))
8489 return NULL_TREE;
8491 len = c_strlen (src, 1);
8492 if (!len
8493 || TREE_CODE (len) != INTEGER_CST)
8494 return NULL_TREE;
8496 if (optimize_function_for_size_p (cfun)
8497 /* If length is zero it's small enough. */
8498 && !integer_zerop (len))
8499 return NULL_TREE;
8501 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8502 if (!fn)
8503 return NULL_TREE;
8505 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8506 /* We use dest twice in building our expression. Save it from
8507 multiple expansions. */
8508 dest = builtin_save_expr (dest);
8509 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8511 type = TREE_TYPE (TREE_TYPE (fndecl));
8512 len = fold_convert_loc (loc, sizetype, len);
8513 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8514 dest = fold_convert_loc (loc, type, dest);
8515 dest = omit_one_operand_loc (loc, type, dest, call);
8516 return dest;
8519 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8520 If SLEN is not NULL, it represents the length of the source string.
8521 Return NULL_TREE if no simplification can be made. */
8523 tree
8524 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8525 tree src, tree len, tree slen)
8527 tree fn;
8529 if (!validate_arg (dest, POINTER_TYPE)
8530 || !validate_arg (src, POINTER_TYPE)
8531 || !validate_arg (len, INTEGER_TYPE))
8532 return NULL_TREE;
8534 /* If the LEN parameter is zero, return DEST. */
8535 if (integer_zerop (len))
8536 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8538 /* We can't compare slen with len as constants below if len is not a
8539 constant. */
8540 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8541 return NULL_TREE;
8543 if (!slen)
8544 slen = c_strlen (src, 1);
8546 /* Now, we must be passed a constant src ptr parameter. */
8547 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8548 return NULL_TREE;
8550 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8552 /* We do not support simplification of this case, though we do
8553 support it when expanding trees into RTL. */
8554 /* FIXME: generate a call to __builtin_memset. */
8555 if (tree_int_cst_lt (slen, len))
8556 return NULL_TREE;
8558 /* OK transform into builtin memcpy. */
8559 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8560 if (!fn)
8561 return NULL_TREE;
8562 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8563 build_call_expr_loc (loc, fn, 3, dest, src, len));
8566 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8567 arguments to the call, and TYPE is its return type.
8568 Return NULL_TREE if no simplification can be made. */
8570 static tree
8571 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8573 if (!validate_arg (arg1, POINTER_TYPE)
8574 || !validate_arg (arg2, INTEGER_TYPE)
8575 || !validate_arg (len, INTEGER_TYPE))
8576 return NULL_TREE;
8577 else
8579 const char *p1;
8581 if (TREE_CODE (arg2) != INTEGER_CST
8582 || !host_integerp (len, 1))
8583 return NULL_TREE;
8585 p1 = c_getstr (arg1);
8586 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8588 char c;
8589 const char *r;
8590 tree tem;
8592 if (target_char_cast (arg2, &c))
8593 return NULL_TREE;
8595 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8597 if (r == NULL)
8598 return build_int_cst (TREE_TYPE (arg1), 0);
8600 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8601 size_int (r - p1));
8602 return fold_convert_loc (loc, type, tem);
8604 return NULL_TREE;
8608 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8609 Return NULL_TREE if no simplification can be made. */
8611 static tree
8612 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8614 const char *p1, *p2;
8616 if (!validate_arg (arg1, POINTER_TYPE)
8617 || !validate_arg (arg2, POINTER_TYPE)
8618 || !validate_arg (len, INTEGER_TYPE))
8619 return NULL_TREE;
8621 /* If the LEN parameter is zero, return zero. */
8622 if (integer_zerop (len))
8623 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8624 arg1, arg2);
8626 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8627 if (operand_equal_p (arg1, arg2, 0))
8628 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8630 p1 = c_getstr (arg1);
8631 p2 = c_getstr (arg2);
8633 /* If all arguments are constant, and the value of len is not greater
8634 than the lengths of arg1 and arg2, evaluate at compile-time. */
8635 if (host_integerp (len, 1) && p1 && p2
8636 && compare_tree_int (len, strlen (p1) + 1) <= 0
8637 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8639 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8641 if (r > 0)
8642 return integer_one_node;
8643 else if (r < 0)
8644 return integer_minus_one_node;
8645 else
8646 return integer_zero_node;
8649 /* If len parameter is one, return an expression corresponding to
8650 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8651 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8653 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8654 tree cst_uchar_ptr_node
8655 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8657 tree ind1
8658 = fold_convert_loc (loc, integer_type_node,
8659 build1 (INDIRECT_REF, cst_uchar_node,
8660 fold_convert_loc (loc,
8661 cst_uchar_ptr_node,
8662 arg1)));
8663 tree ind2
8664 = fold_convert_loc (loc, integer_type_node,
8665 build1 (INDIRECT_REF, cst_uchar_node,
8666 fold_convert_loc (loc,
8667 cst_uchar_ptr_node,
8668 arg2)));
8669 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8672 return NULL_TREE;
8675 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8676 Return NULL_TREE if no simplification can be made. */
8678 static tree
8679 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8681 const char *p1, *p2;
8683 if (!validate_arg (arg1, POINTER_TYPE)
8684 || !validate_arg (arg2, POINTER_TYPE))
8685 return NULL_TREE;
8687 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8688 if (operand_equal_p (arg1, arg2, 0))
8689 return integer_zero_node;
8691 p1 = c_getstr (arg1);
8692 p2 = c_getstr (arg2);
8694 if (p1 && p2)
8696 const int i = strcmp (p1, p2);
8697 if (i < 0)
8698 return integer_minus_one_node;
8699 else if (i > 0)
8700 return integer_one_node;
8701 else
8702 return integer_zero_node;
8705 /* If the second arg is "", return *(const unsigned char*)arg1. */
8706 if (p2 && *p2 == '\0')
8708 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8709 tree cst_uchar_ptr_node
8710 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8712 return fold_convert_loc (loc, integer_type_node,
8713 build1 (INDIRECT_REF, cst_uchar_node,
8714 fold_convert_loc (loc,
8715 cst_uchar_ptr_node,
8716 arg1)));
8719 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8720 if (p1 && *p1 == '\0')
8722 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8723 tree cst_uchar_ptr_node
8724 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8726 tree temp
8727 = fold_convert_loc (loc, integer_type_node,
8728 build1 (INDIRECT_REF, cst_uchar_node,
8729 fold_convert_loc (loc,
8730 cst_uchar_ptr_node,
8731 arg2)));
8732 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8735 return NULL_TREE;
8738 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8739 Return NULL_TREE if no simplification can be made. */
8741 static tree
8742 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8744 const char *p1, *p2;
8746 if (!validate_arg (arg1, POINTER_TYPE)
8747 || !validate_arg (arg2, POINTER_TYPE)
8748 || !validate_arg (len, INTEGER_TYPE))
8749 return NULL_TREE;
8751 /* If the LEN parameter is zero, return zero. */
8752 if (integer_zerop (len))
8753 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8754 arg1, arg2);
8756 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8757 if (operand_equal_p (arg1, arg2, 0))
8758 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8760 p1 = c_getstr (arg1);
8761 p2 = c_getstr (arg2);
8763 if (host_integerp (len, 1) && p1 && p2)
8765 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8766 if (i > 0)
8767 return integer_one_node;
8768 else if (i < 0)
8769 return integer_minus_one_node;
8770 else
8771 return integer_zero_node;
8774 /* If the second arg is "", and the length is greater than zero,
8775 return *(const unsigned char*)arg1. */
8776 if (p2 && *p2 == '\0'
8777 && TREE_CODE (len) == INTEGER_CST
8778 && tree_int_cst_sgn (len) == 1)
8780 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8781 tree cst_uchar_ptr_node
8782 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8784 return fold_convert_loc (loc, integer_type_node,
8785 build1 (INDIRECT_REF, cst_uchar_node,
8786 fold_convert_loc (loc,
8787 cst_uchar_ptr_node,
8788 arg1)));
8791 /* If the first arg is "", and the length is greater than zero,
8792 return -*(const unsigned char*)arg2. */
8793 if (p1 && *p1 == '\0'
8794 && TREE_CODE (len) == INTEGER_CST
8795 && tree_int_cst_sgn (len) == 1)
8797 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8798 tree cst_uchar_ptr_node
8799 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8801 tree temp = fold_convert_loc (loc, integer_type_node,
8802 build1 (INDIRECT_REF, cst_uchar_node,
8803 fold_convert_loc (loc,
8804 cst_uchar_ptr_node,
8805 arg2)));
8806 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8809 /* If len parameter is one, return an expression corresponding to
8810 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8811 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8813 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8814 tree cst_uchar_ptr_node
8815 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8817 tree ind1 = fold_convert_loc (loc, integer_type_node,
8818 build1 (INDIRECT_REF, cst_uchar_node,
8819 fold_convert_loc (loc,
8820 cst_uchar_ptr_node,
8821 arg1)));
8822 tree ind2 = fold_convert_loc (loc, integer_type_node,
8823 build1 (INDIRECT_REF, cst_uchar_node,
8824 fold_convert_loc (loc,
8825 cst_uchar_ptr_node,
8826 arg2)));
8827 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8830 return NULL_TREE;
8833 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8834 ARG. Return NULL_TREE if no simplification can be made. */
8836 static tree
8837 fold_builtin_signbit (location_t loc, tree arg, tree type)
8839 tree temp;
8841 if (!validate_arg (arg, REAL_TYPE))
8842 return NULL_TREE;
8844 /* If ARG is a compile-time constant, determine the result. */
8845 if (TREE_CODE (arg) == REAL_CST
8846 && !TREE_OVERFLOW (arg))
8848 REAL_VALUE_TYPE c;
8850 c = TREE_REAL_CST (arg);
8851 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8852 return fold_convert_loc (loc, type, temp);
8855 /* If ARG is non-negative, the result is always zero. */
8856 if (tree_expr_nonnegative_p (arg))
8857 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8859 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8860 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8861 return fold_build2_loc (loc, LT_EXPR, type, arg,
8862 build_real (TREE_TYPE (arg), dconst0));
8864 return NULL_TREE;
8867 /* Fold function call to builtin copysign, copysignf or copysignl with
8868 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8869 be made. */
8871 static tree
8872 fold_builtin_copysign (location_t loc, tree fndecl,
8873 tree arg1, tree arg2, tree type)
8875 tree tem;
8877 if (!validate_arg (arg1, REAL_TYPE)
8878 || !validate_arg (arg2, REAL_TYPE))
8879 return NULL_TREE;
8881 /* copysign(X,X) is X. */
8882 if (operand_equal_p (arg1, arg2, 0))
8883 return fold_convert_loc (loc, type, arg1);
8885 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8886 if (TREE_CODE (arg1) == REAL_CST
8887 && TREE_CODE (arg2) == REAL_CST
8888 && !TREE_OVERFLOW (arg1)
8889 && !TREE_OVERFLOW (arg2))
8891 REAL_VALUE_TYPE c1, c2;
8893 c1 = TREE_REAL_CST (arg1);
8894 c2 = TREE_REAL_CST (arg2);
8895 /* c1.sign := c2.sign. */
8896 real_copysign (&c1, &c2);
8897 return build_real (type, c1);
8900 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8901 Remember to evaluate Y for side-effects. */
8902 if (tree_expr_nonnegative_p (arg2))
8903 return omit_one_operand_loc (loc, type,
8904 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8905 arg2);
8907 /* Strip sign changing operations for the first argument. */
8908 tem = fold_strip_sign_ops (arg1);
8909 if (tem)
8910 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8912 return NULL_TREE;
8915 /* Fold a call to builtin isascii with argument ARG. */
8917 static tree
8918 fold_builtin_isascii (location_t loc, tree arg)
8920 if (!validate_arg (arg, INTEGER_TYPE))
8921 return NULL_TREE;
8922 else
8924 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8925 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8926 build_int_cst (NULL_TREE,
8927 ~ (unsigned HOST_WIDE_INT) 0x7f));
8928 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8929 arg, integer_zero_node);
8933 /* Fold a call to builtin toascii with argument ARG. */
8935 static tree
8936 fold_builtin_toascii (location_t loc, tree arg)
8938 if (!validate_arg (arg, INTEGER_TYPE))
8939 return NULL_TREE;
8941 /* Transform toascii(c) -> (c & 0x7f). */
8942 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8943 build_int_cst (NULL_TREE, 0x7f));
8946 /* Fold a call to builtin isdigit with argument ARG. */
8948 static tree
8949 fold_builtin_isdigit (location_t loc, tree arg)
8951 if (!validate_arg (arg, INTEGER_TYPE))
8952 return NULL_TREE;
8953 else
8955 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8956 /* According to the C standard, isdigit is unaffected by locale.
8957 However, it definitely is affected by the target character set. */
8958 unsigned HOST_WIDE_INT target_digit0
8959 = lang_hooks.to_target_charset ('0');
8961 if (target_digit0 == 0)
8962 return NULL_TREE;
8964 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8965 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8966 build_int_cst (unsigned_type_node, target_digit0));
8967 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8968 build_int_cst (unsigned_type_node, 9));
8972 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8974 static tree
8975 fold_builtin_fabs (location_t loc, tree arg, tree type)
8977 if (!validate_arg (arg, REAL_TYPE))
8978 return NULL_TREE;
8980 arg = fold_convert_loc (loc, type, arg);
8981 if (TREE_CODE (arg) == REAL_CST)
8982 return fold_abs_const (arg, type);
8983 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8986 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8988 static tree
8989 fold_builtin_abs (location_t loc, tree arg, tree type)
8991 if (!validate_arg (arg, INTEGER_TYPE))
8992 return NULL_TREE;
8994 arg = fold_convert_loc (loc, type, arg);
8995 if (TREE_CODE (arg) == INTEGER_CST)
8996 return fold_abs_const (arg, type);
8997 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9000 /* Fold a call to builtin fmin or fmax. */
9002 static tree
9003 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9004 tree type, bool max)
9006 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9008 /* Calculate the result when the argument is a constant. */
9009 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9011 if (res)
9012 return res;
9014 /* If either argument is NaN, return the other one. Avoid the
9015 transformation if we get (and honor) a signalling NaN. Using
9016 omit_one_operand() ensures we create a non-lvalue. */
9017 if (TREE_CODE (arg0) == REAL_CST
9018 && real_isnan (&TREE_REAL_CST (arg0))
9019 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9020 || ! TREE_REAL_CST (arg0).signalling))
9021 return omit_one_operand_loc (loc, type, arg1, arg0);
9022 if (TREE_CODE (arg1) == REAL_CST
9023 && real_isnan (&TREE_REAL_CST (arg1))
9024 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9025 || ! TREE_REAL_CST (arg1).signalling))
9026 return omit_one_operand_loc (loc, type, arg0, arg1);
9028 /* Transform fmin/fmax(x,x) -> x. */
9029 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9030 return omit_one_operand_loc (loc, type, arg0, arg1);
9032 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9033 functions to return the numeric arg if the other one is NaN.
9034 These tree codes don't honor that, so only transform if
9035 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9036 handled, so we don't have to worry about it either. */
9037 if (flag_finite_math_only)
9038 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9039 fold_convert_loc (loc, type, arg0),
9040 fold_convert_loc (loc, type, arg1));
9042 return NULL_TREE;
9045 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9047 static tree
9048 fold_builtin_carg (location_t loc, tree arg, tree type)
9050 if (validate_arg (arg, COMPLEX_TYPE)
9051 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9053 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9055 if (atan2_fn)
9057 tree new_arg = builtin_save_expr (arg);
9058 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9059 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9060 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9064 return NULL_TREE;
/* Fold a call to builtin logb/ilogb with argument ARG and return
   type RETTYPE (real for logb, integer for ilogb).  Only a constant
   argument is folded; returns NULL_TREE otherwise.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_inf:
	  /* If arg is Inf or NaN and we're logb, return it.  */
	  if (TREE_CODE (rettype) == REAL_TYPE)
	    return fold_convert_loc (loc, rettype, arg);
	  /* Fall through...  */
	case rvc_zero:
	  /* Zero may set errno and/or raise an exception for logb, also
	     for ilogb we don't know FP_ILOGB0.  */
	  return NULL_TREE;
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  In GCC,
	     normalized significands are in the range [0.5, 1.0).  We
	     want the exponent as if they were [1.0, 2.0) so get the
	     exponent and subtract 1.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    return fold_convert_loc (loc, rettype,
				     build_int_cst (NULL_TREE,
						    REAL_EXP (value)-1));
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand with argument ARG and return
   type RETTYPE, if radix == 2.  Only a constant argument is folded;
   returns NULL_TREE otherwise.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
	{
	case rvc_zero:
	case rvc_nan:
	case rvc_inf:
	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
	  return fold_convert_loc (loc, rettype, arg);
	case rvc_normal:
	  /* For normal numbers, proceed iff radix == 2.  */
	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
	    {
	      REAL_VALUE_TYPE result = *value;
	      /* In GCC, normalized significands are in the range [0.5,
		 1.0).  We want them to be [1.0, 2.0) so set the
		 exponent to 1.  */
	      SET_REAL_EXP (&result, 1);
	      return build_real (rettype, result);
	    }
	  break;
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp with arguments ARG0 (the value) and
   ARG1 (the int* exponent out-parameter) and return type RETTYPE.
   We can assume the base is 2.  Only a constant ARG0 is folded;
   returns NULL_TREE otherwise.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
			    tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
	  || (TREE_CODE (arg0) == REAL_CST
	      && !real_isfinite (&TREE_REAL_CST (arg0))))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
	 For scalbn/scalbln this is only valid when the radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
	  && host_integerp (arg1, 0))
	{
	  /* Bound the maximum adjustment to twice the range of the
	     mode's valid exponents.  Use abs to ensure the range is
	     positive as a sanity check.  */
	  const long max_exp_adj = 2 *
	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
		  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

	  /* Get the user-requested adjustment.  */
	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

	  /* The requested adjustment must be inside this range.  This
	     is a preliminary cap to avoid things like overflow, we
	     may still fail to compute the result for other reasons.  */
	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
	    {
	      REAL_VALUE_TYPE initial_result;

	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

	      /* Ensure we didn't overflow.  */
	      if (! real_isinf (&initial_result))
		{
		  const REAL_VALUE_TYPE trunc_result
		    = real_value_truncate (TYPE_MODE (type), initial_result);

		  /* Only proceed if the target mode can hold the
		     resulting value.  */
		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
		    return build_real (type, trunc_result);
		}
	    }
	}
    }

  return NULL_TREE;
}
9265 /* Fold a call to builtin modf. */
9267 static tree
9268 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9270 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9271 return NULL_TREE;
9273 STRIP_NOPS (arg0);
9275 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9276 return NULL_TREE;
9278 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9280 /* Proceed if a valid pointer type was passed in. */
9281 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9283 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9284 REAL_VALUE_TYPE trunc, frac;
9286 switch (value->cl)
9288 case rvc_nan:
9289 case rvc_zero:
9290 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9291 trunc = frac = *value;
9292 break;
9293 case rvc_inf:
9294 /* For +-Inf, return (*arg1 = arg0, +-0). */
9295 frac = dconst0;
9296 frac.sign = value->sign;
9297 trunc = *value;
9298 break;
9299 case rvc_normal:
9300 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9301 real_trunc (&trunc, VOIDmode, value);
9302 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9303 /* If the original number was negative and already
9304 integral, then the fractional part is -0.0. */
9305 if (value->sign && frac.cl == rvc_zero)
9306 frac.sign = value->sign;
9307 break;
9310 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9311 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9312 build_real (rettype, trunc));
9313 TREE_SIDE_EFFECTS (arg1) = 1;
9314 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9315 build_real (rettype, frac));
9318 return NULL_TREE;
9321 /* Given a location LOC, an interclass builtin function decl FNDECL
9322 and its single argument ARG, return an folded expression computing
9323 the same, or NULL_TREE if we either couldn't or didn't want to fold
9324 (the latter happen if there's an RTL instruction available). */
9326 static tree
9327 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9329 enum machine_mode mode;
9331 if (!validate_arg (arg, REAL_TYPE))
9332 return NULL_TREE;
9334 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9335 return NULL_TREE;
9337 mode = TYPE_MODE (TREE_TYPE (arg));
9339 /* If there is no optab, try generic code. */
9340 switch (DECL_FUNCTION_CODE (fndecl))
9342 tree result;
9344 CASE_FLT_FN (BUILT_IN_ISINF):
9346 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9347 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9348 tree const type = TREE_TYPE (arg);
9349 REAL_VALUE_TYPE r;
9350 char buf[128];
9352 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9353 real_from_string (&r, buf);
9354 result = build_call_expr (isgr_fn, 2,
9355 fold_build1_loc (loc, ABS_EXPR, type, arg),
9356 build_real (type, r));
9357 return result;
9359 CASE_FLT_FN (BUILT_IN_FINITE):
9360 case BUILT_IN_ISFINITE:
9362 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9363 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9364 tree const type = TREE_TYPE (arg);
9365 REAL_VALUE_TYPE r;
9366 char buf[128];
9368 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9369 real_from_string (&r, buf);
9370 result = build_call_expr (isle_fn, 2,
9371 fold_build1_loc (loc, ABS_EXPR, type, arg),
9372 build_real (type, r));
9373 /*result = fold_build2_loc (loc, UNGT_EXPR,
9374 TREE_TYPE (TREE_TYPE (fndecl)),
9375 fold_build1_loc (loc, ABS_EXPR, type, arg),
9376 build_real (type, r));
9377 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9378 TREE_TYPE (TREE_TYPE (fndecl)),
9379 result);*/
9380 return result;
9382 case BUILT_IN_ISNORMAL:
9384 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9385 islessequal(fabs(x),DBL_MAX). */
9386 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9387 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9388 tree const type = TREE_TYPE (arg);
9389 REAL_VALUE_TYPE rmax, rmin;
9390 char buf[128];
9392 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9393 real_from_string (&rmax, buf);
9394 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9395 real_from_string (&rmin, buf);
9396 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9397 result = build_call_expr (isle_fn, 2, arg,
9398 build_real (type, rmax));
9399 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9400 build_call_expr (isge_fn, 2, arg,
9401 build_real (type, rmin)));
9402 return result;
9404 default:
9405 break;
9408 return NULL_TREE;
9411 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9412 ARG is the argument for the call. */
9414 static tree
9415 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9417 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9418 REAL_VALUE_TYPE r;
9420 if (!validate_arg (arg, REAL_TYPE))
9421 return NULL_TREE;
9423 switch (builtin_index)
9425 case BUILT_IN_ISINF:
9426 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9427 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9429 if (TREE_CODE (arg) == REAL_CST)
9431 r = TREE_REAL_CST (arg);
9432 if (real_isinf (&r))
9433 return real_compare (GT_EXPR, &r, &dconst0)
9434 ? integer_one_node : integer_minus_one_node;
9435 else
9436 return integer_zero_node;
9439 return NULL_TREE;
9441 case BUILT_IN_ISINF_SIGN:
9443 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9444 /* In a boolean context, GCC will fold the inner COND_EXPR to
9445 1. So e.g. "if (isinf_sign(x))" would be folded to just
9446 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9447 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9448 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9449 tree tmp = NULL_TREE;
9451 arg = builtin_save_expr (arg);
9453 if (signbit_fn && isinf_fn)
9455 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9456 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9458 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9459 signbit_call, integer_zero_node);
9460 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9461 isinf_call, integer_zero_node);
9463 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9464 integer_minus_one_node, integer_one_node);
9465 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9466 isinf_call, tmp,
9467 integer_zero_node);
9470 return tmp;
9473 case BUILT_IN_ISFINITE:
9474 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9475 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9476 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9478 if (TREE_CODE (arg) == REAL_CST)
9480 r = TREE_REAL_CST (arg);
9481 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9484 return NULL_TREE;
9486 case BUILT_IN_ISNAN:
9487 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9488 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9490 if (TREE_CODE (arg) == REAL_CST)
9492 r = TREE_REAL_CST (arg);
9493 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9496 arg = builtin_save_expr (arg);
9497 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9499 default:
9500 gcc_unreachable ();
9504 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9505 This builtin will generate code to return the appropriate floating
9506 point classification depending on the value of the floating point
9507 number passed in. The possible return values must be supplied as
9508 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9509 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9510 one floating point argument which is "type generic". */
9512 static tree
9513 fold_builtin_fpclassify (location_t loc, tree exp)
9515 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9516 arg, type, res, tmp;
9517 enum machine_mode mode;
9518 REAL_VALUE_TYPE r;
9519 char buf[128];
9521 /* Verify the required arguments in the original call. */
9522 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9523 INTEGER_TYPE, INTEGER_TYPE,
9524 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9525 return NULL_TREE;
9527 fp_nan = CALL_EXPR_ARG (exp, 0);
9528 fp_infinite = CALL_EXPR_ARG (exp, 1);
9529 fp_normal = CALL_EXPR_ARG (exp, 2);
9530 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9531 fp_zero = CALL_EXPR_ARG (exp, 4);
9532 arg = CALL_EXPR_ARG (exp, 5);
9533 type = TREE_TYPE (arg);
9534 mode = TYPE_MODE (type);
9535 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9537 /* fpclassify(x) ->
9538 isnan(x) ? FP_NAN :
9539 (fabs(x) == Inf ? FP_INFINITE :
9540 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9541 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9543 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9544 build_real (type, dconst0));
9545 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9546 tmp, fp_zero, fp_subnormal);
9548 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9549 real_from_string (&r, buf);
9550 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9551 arg, build_real (type, r));
9552 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9554 if (HONOR_INFINITIES (mode))
9556 real_inf (&r);
9557 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9558 build_real (type, r));
9559 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9560 fp_infinite, res);
9563 if (HONOR_NANS (mode))
9565 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9566 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9569 return res;
9572 /* Fold a call to an unordered comparison function such as
9573 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9574 being called and ARG0 and ARG1 are the arguments for the call.
9575 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9576 the opposite of the desired result. UNORDERED_CODE is used
9577 for modes that can hold NaNs and ORDERED_CODE is used for
9578 the rest. */
9580 static tree
9581 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9582 enum tree_code unordered_code,
9583 enum tree_code ordered_code)
9585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9586 enum tree_code code;
9587 tree type0, type1;
9588 enum tree_code code0, code1;
9589 tree cmp_type = NULL_TREE;
9591 type0 = TREE_TYPE (arg0);
9592 type1 = TREE_TYPE (arg1);
9594 code0 = TREE_CODE (type0);
9595 code1 = TREE_CODE (type1);
9597 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9598 /* Choose the wider of two real types. */
9599 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9600 ? type0 : type1;
9601 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9602 cmp_type = type0;
9603 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9604 cmp_type = type1;
9606 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9607 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9609 if (unordered_code == UNORDERED_EXPR)
9611 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9612 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9613 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9616 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9617 : ordered_code;
9618 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9619 fold_build2_loc (loc, code, type, arg0, arg1));
9622 /* Fold a call to built-in function FNDECL with 0 arguments.
9623 IGNORE is true if the result of the function call is ignored. This
9624 function returns NULL_TREE if no simplification was possible. */
9626 static tree
9627 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9629 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9630 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9631 switch (fcode)
9633 CASE_FLT_FN (BUILT_IN_INF):
9634 case BUILT_IN_INFD32:
9635 case BUILT_IN_INFD64:
9636 case BUILT_IN_INFD128:
9637 return fold_builtin_inf (loc, type, true);
9639 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9640 return fold_builtin_inf (loc, type, false);
9642 case BUILT_IN_CLASSIFY_TYPE:
9643 return fold_builtin_classify_type (NULL_TREE);
9645 default:
9646 break;
9648 return NULL_TREE;
9651 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9652 IGNORE is true if the result of the function call is ignored. This
9653 function returns NULL_TREE if no simplification was possible. */
9655 static tree
9656 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9658 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9659 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9660 switch (fcode)
9663 case BUILT_IN_CONSTANT_P:
9665 tree val = fold_builtin_constant_p (arg0);
9667 /* Gimplification will pull the CALL_EXPR for the builtin out of
9668 an if condition. When not optimizing, we'll not CSE it back.
9669 To avoid link error types of regressions, return false now. */
9670 if (!val && !optimize)
9671 val = integer_zero_node;
9673 return val;
9676 case BUILT_IN_CLASSIFY_TYPE:
9677 return fold_builtin_classify_type (arg0);
9679 case BUILT_IN_STRLEN:
9680 return fold_builtin_strlen (loc, type, arg0);
9682 CASE_FLT_FN (BUILT_IN_FABS):
9683 return fold_builtin_fabs (loc, arg0, type);
9685 case BUILT_IN_ABS:
9686 case BUILT_IN_LABS:
9687 case BUILT_IN_LLABS:
9688 case BUILT_IN_IMAXABS:
9689 return fold_builtin_abs (loc, arg0, type);
9691 CASE_FLT_FN (BUILT_IN_CONJ):
9692 if (validate_arg (arg0, COMPLEX_TYPE)
9693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9694 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9695 break;
9697 CASE_FLT_FN (BUILT_IN_CREAL):
9698 if (validate_arg (arg0, COMPLEX_TYPE)
9699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9700 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9701 break;
9703 CASE_FLT_FN (BUILT_IN_CIMAG):
9704 if (validate_arg (arg0, COMPLEX_TYPE)
9705 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9706 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9707 break;
9709 CASE_FLT_FN (BUILT_IN_CCOS):
9710 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9712 CASE_FLT_FN (BUILT_IN_CCOSH):
9713 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9715 CASE_FLT_FN (BUILT_IN_CSIN):
9716 if (validate_arg (arg0, COMPLEX_TYPE)
9717 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9718 return do_mpc_arg1 (arg0, type, mpc_sin);
9719 break;
9721 CASE_FLT_FN (BUILT_IN_CSINH):
9722 if (validate_arg (arg0, COMPLEX_TYPE)
9723 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9724 return do_mpc_arg1 (arg0, type, mpc_sinh);
9725 break;
9727 CASE_FLT_FN (BUILT_IN_CTAN):
9728 if (validate_arg (arg0, COMPLEX_TYPE)
9729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9730 return do_mpc_arg1 (arg0, type, mpc_tan);
9731 break;
9733 CASE_FLT_FN (BUILT_IN_CTANH):
9734 if (validate_arg (arg0, COMPLEX_TYPE)
9735 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9736 return do_mpc_arg1 (arg0, type, mpc_tanh);
9737 break;
9739 CASE_FLT_FN (BUILT_IN_CLOG):
9740 if (validate_arg (arg0, COMPLEX_TYPE)
9741 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9742 return do_mpc_arg1 (arg0, type, mpc_log);
9743 break;
9745 CASE_FLT_FN (BUILT_IN_CSQRT):
9746 if (validate_arg (arg0, COMPLEX_TYPE)
9747 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9748 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9749 break;
9751 CASE_FLT_FN (BUILT_IN_CASIN):
9752 if (validate_arg (arg0, COMPLEX_TYPE)
9753 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9754 return do_mpc_arg1 (arg0, type, mpc_asin);
9755 break;
9757 CASE_FLT_FN (BUILT_IN_CACOS):
9758 if (validate_arg (arg0, COMPLEX_TYPE)
9759 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9760 return do_mpc_arg1 (arg0, type, mpc_acos);
9761 break;
9763 CASE_FLT_FN (BUILT_IN_CATAN):
9764 if (validate_arg (arg0, COMPLEX_TYPE)
9765 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9766 return do_mpc_arg1 (arg0, type, mpc_atan);
9767 break;
9769 CASE_FLT_FN (BUILT_IN_CASINH):
9770 if (validate_arg (arg0, COMPLEX_TYPE)
9771 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9772 return do_mpc_arg1 (arg0, type, mpc_asinh);
9773 break;
9775 CASE_FLT_FN (BUILT_IN_CACOSH):
9776 if (validate_arg (arg0, COMPLEX_TYPE)
9777 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9778 return do_mpc_arg1 (arg0, type, mpc_acosh);
9779 break;
9781 CASE_FLT_FN (BUILT_IN_CATANH):
9782 if (validate_arg (arg0, COMPLEX_TYPE)
9783 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9784 return do_mpc_arg1 (arg0, type, mpc_atanh);
9785 break;
9787 CASE_FLT_FN (BUILT_IN_CABS):
9788 return fold_builtin_cabs (loc, arg0, type, fndecl);
9790 CASE_FLT_FN (BUILT_IN_CARG):
9791 return fold_builtin_carg (loc, arg0, type);
9793 CASE_FLT_FN (BUILT_IN_SQRT):
9794 return fold_builtin_sqrt (loc, arg0, type);
9796 CASE_FLT_FN (BUILT_IN_CBRT):
9797 return fold_builtin_cbrt (loc, arg0, type);
9799 CASE_FLT_FN (BUILT_IN_ASIN):
9800 if (validate_arg (arg0, REAL_TYPE))
9801 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9802 &dconstm1, &dconst1, true);
9803 break;
9805 CASE_FLT_FN (BUILT_IN_ACOS):
9806 if (validate_arg (arg0, REAL_TYPE))
9807 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9808 &dconstm1, &dconst1, true);
9809 break;
9811 CASE_FLT_FN (BUILT_IN_ATAN):
9812 if (validate_arg (arg0, REAL_TYPE))
9813 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9814 break;
9816 CASE_FLT_FN (BUILT_IN_ASINH):
9817 if (validate_arg (arg0, REAL_TYPE))
9818 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9819 break;
9821 CASE_FLT_FN (BUILT_IN_ACOSH):
9822 if (validate_arg (arg0, REAL_TYPE))
9823 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9824 &dconst1, NULL, true);
9825 break;
9827 CASE_FLT_FN (BUILT_IN_ATANH):
9828 if (validate_arg (arg0, REAL_TYPE))
9829 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9830 &dconstm1, &dconst1, false);
9831 break;
9833 CASE_FLT_FN (BUILT_IN_SIN):
9834 if (validate_arg (arg0, REAL_TYPE))
9835 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9836 break;
9838 CASE_FLT_FN (BUILT_IN_COS):
9839 return fold_builtin_cos (loc, arg0, type, fndecl);
9841 CASE_FLT_FN (BUILT_IN_TAN):
9842 return fold_builtin_tan (arg0, type);
9844 CASE_FLT_FN (BUILT_IN_CEXP):
9845 return fold_builtin_cexp (loc, arg0, type);
9847 CASE_FLT_FN (BUILT_IN_CEXPI):
9848 if (validate_arg (arg0, REAL_TYPE))
9849 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9850 break;
9852 CASE_FLT_FN (BUILT_IN_SINH):
9853 if (validate_arg (arg0, REAL_TYPE))
9854 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9855 break;
9857 CASE_FLT_FN (BUILT_IN_COSH):
9858 return fold_builtin_cosh (loc, arg0, type, fndecl);
9860 CASE_FLT_FN (BUILT_IN_TANH):
9861 if (validate_arg (arg0, REAL_TYPE))
9862 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9863 break;
9865 CASE_FLT_FN (BUILT_IN_ERF):
9866 if (validate_arg (arg0, REAL_TYPE))
9867 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9868 break;
9870 CASE_FLT_FN (BUILT_IN_ERFC):
9871 if (validate_arg (arg0, REAL_TYPE))
9872 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9873 break;
9875 CASE_FLT_FN (BUILT_IN_TGAMMA):
9876 if (validate_arg (arg0, REAL_TYPE))
9877 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9878 break;
9880 CASE_FLT_FN (BUILT_IN_EXP):
9881 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9883 CASE_FLT_FN (BUILT_IN_EXP2):
9884 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9886 CASE_FLT_FN (BUILT_IN_EXP10):
9887 CASE_FLT_FN (BUILT_IN_POW10):
9888 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9890 CASE_FLT_FN (BUILT_IN_EXPM1):
9891 if (validate_arg (arg0, REAL_TYPE))
9892 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9893 break;
9895 CASE_FLT_FN (BUILT_IN_LOG):
9896 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9898 CASE_FLT_FN (BUILT_IN_LOG2):
9899 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9901 CASE_FLT_FN (BUILT_IN_LOG10):
9902 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9904 CASE_FLT_FN (BUILT_IN_LOG1P):
9905 if (validate_arg (arg0, REAL_TYPE))
9906 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9907 &dconstm1, NULL, false);
9908 break;
9910 CASE_FLT_FN (BUILT_IN_J0):
9911 if (validate_arg (arg0, REAL_TYPE))
9912 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9913 NULL, NULL, 0);
9914 break;
9916 CASE_FLT_FN (BUILT_IN_J1):
9917 if (validate_arg (arg0, REAL_TYPE))
9918 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9919 NULL, NULL, 0);
9920 break;
9922 CASE_FLT_FN (BUILT_IN_Y0):
9923 if (validate_arg (arg0, REAL_TYPE))
9924 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9925 &dconst0, NULL, false);
9926 break;
9928 CASE_FLT_FN (BUILT_IN_Y1):
9929 if (validate_arg (arg0, REAL_TYPE))
9930 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9931 &dconst0, NULL, false);
9932 break;
9934 CASE_FLT_FN (BUILT_IN_NAN):
9935 case BUILT_IN_NAND32:
9936 case BUILT_IN_NAND64:
9937 case BUILT_IN_NAND128:
9938 return fold_builtin_nan (arg0, type, true);
9940 CASE_FLT_FN (BUILT_IN_NANS):
9941 return fold_builtin_nan (arg0, type, false);
9943 CASE_FLT_FN (BUILT_IN_FLOOR):
9944 return fold_builtin_floor (loc, fndecl, arg0);
9946 CASE_FLT_FN (BUILT_IN_CEIL):
9947 return fold_builtin_ceil (loc, fndecl, arg0);
9949 CASE_FLT_FN (BUILT_IN_TRUNC):
9950 return fold_builtin_trunc (loc, fndecl, arg0);
9952 CASE_FLT_FN (BUILT_IN_ROUND):
9953 return fold_builtin_round (loc, fndecl, arg0);
9955 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9956 CASE_FLT_FN (BUILT_IN_RINT):
9957 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9959 CASE_FLT_FN (BUILT_IN_LCEIL):
9960 CASE_FLT_FN (BUILT_IN_LLCEIL):
9961 CASE_FLT_FN (BUILT_IN_LFLOOR):
9962 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9963 CASE_FLT_FN (BUILT_IN_LROUND):
9964 CASE_FLT_FN (BUILT_IN_LLROUND):
9965 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9967 CASE_FLT_FN (BUILT_IN_LRINT):
9968 CASE_FLT_FN (BUILT_IN_LLRINT):
9969 return fold_fixed_mathfn (loc, fndecl, arg0);
9971 case BUILT_IN_BSWAP32:
9972 case BUILT_IN_BSWAP64:
9973 return fold_builtin_bswap (fndecl, arg0);
9975 CASE_INT_FN (BUILT_IN_FFS):
9976 CASE_INT_FN (BUILT_IN_CLZ):
9977 CASE_INT_FN (BUILT_IN_CTZ):
9978 CASE_INT_FN (BUILT_IN_POPCOUNT):
9979 CASE_INT_FN (BUILT_IN_PARITY):
9980 return fold_builtin_bitop (fndecl, arg0);
9982 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9983 return fold_builtin_signbit (loc, arg0, type);
9985 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9986 return fold_builtin_significand (loc, arg0, type);
9988 CASE_FLT_FN (BUILT_IN_ILOGB):
9989 CASE_FLT_FN (BUILT_IN_LOGB):
9990 return fold_builtin_logb (loc, arg0, type);
9992 case BUILT_IN_ISASCII:
9993 return fold_builtin_isascii (loc, arg0);
9995 case BUILT_IN_TOASCII:
9996 return fold_builtin_toascii (loc, arg0);
9998 case BUILT_IN_ISDIGIT:
9999 return fold_builtin_isdigit (loc, arg0);
10001 CASE_FLT_FN (BUILT_IN_FINITE):
10002 case BUILT_IN_FINITED32:
10003 case BUILT_IN_FINITED64:
10004 case BUILT_IN_FINITED128:
10005 case BUILT_IN_ISFINITE:
10007 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10008 if (ret)
10009 return ret;
10010 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10013 CASE_FLT_FN (BUILT_IN_ISINF):
10014 case BUILT_IN_ISINFD32:
10015 case BUILT_IN_ISINFD64:
10016 case BUILT_IN_ISINFD128:
10018 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10019 if (ret)
10020 return ret;
10021 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10024 case BUILT_IN_ISNORMAL:
10025 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10027 case BUILT_IN_ISINF_SIGN:
10028 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10030 CASE_FLT_FN (BUILT_IN_ISNAN):
10031 case BUILT_IN_ISNAND32:
10032 case BUILT_IN_ISNAND64:
10033 case BUILT_IN_ISNAND128:
10034 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10036 case BUILT_IN_PRINTF:
10037 case BUILT_IN_PRINTF_UNLOCKED:
10038 case BUILT_IN_VPRINTF:
10039 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10041 default:
10042 break;
10045 return NULL_TREE;
10049 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10050 IGNORE is true if the result of the function call is ignored. This
10051 function returns NULL_TREE if no simplification was possible. */
10053 static tree
10054 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10056 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10057 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10059 switch (fcode)
10061 CASE_FLT_FN (BUILT_IN_JN):
10062 if (validate_arg (arg0, INTEGER_TYPE)
10063 && validate_arg (arg1, REAL_TYPE))
10064 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10065 break;
10067 CASE_FLT_FN (BUILT_IN_YN):
10068 if (validate_arg (arg0, INTEGER_TYPE)
10069 && validate_arg (arg1, REAL_TYPE))
10070 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10071 &dconst0, false);
10072 break;
10074 CASE_FLT_FN (BUILT_IN_DREM):
10075 CASE_FLT_FN (BUILT_IN_REMAINDER):
10076 if (validate_arg (arg0, REAL_TYPE)
10077 && validate_arg(arg1, REAL_TYPE))
10078 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10079 break;
10081 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10082 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10083 if (validate_arg (arg0, REAL_TYPE)
10084 && validate_arg(arg1, POINTER_TYPE))
10085 return do_mpfr_lgamma_r (arg0, arg1, type);
10086 break;
10088 CASE_FLT_FN (BUILT_IN_ATAN2):
10089 if (validate_arg (arg0, REAL_TYPE)
10090 && validate_arg(arg1, REAL_TYPE))
10091 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10092 break;
10094 CASE_FLT_FN (BUILT_IN_FDIM):
10095 if (validate_arg (arg0, REAL_TYPE)
10096 && validate_arg(arg1, REAL_TYPE))
10097 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10098 break;
10100 CASE_FLT_FN (BUILT_IN_HYPOT):
10101 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10103 CASE_FLT_FN (BUILT_IN_CPOW):
10104 if (validate_arg (arg0, COMPLEX_TYPE)
10105 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10106 && validate_arg (arg1, COMPLEX_TYPE)
10107 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10108 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10109 break;
10111 CASE_FLT_FN (BUILT_IN_LDEXP):
10112 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10113 CASE_FLT_FN (BUILT_IN_SCALBN):
10114 CASE_FLT_FN (BUILT_IN_SCALBLN):
10115 return fold_builtin_load_exponent (loc, arg0, arg1,
10116 type, /*ldexp=*/false);
10118 CASE_FLT_FN (BUILT_IN_FREXP):
10119 return fold_builtin_frexp (loc, arg0, arg1, type);
10121 CASE_FLT_FN (BUILT_IN_MODF):
10122 return fold_builtin_modf (loc, arg0, arg1, type);
10124 case BUILT_IN_BZERO:
10125 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10127 case BUILT_IN_FPUTS:
10128 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10130 case BUILT_IN_FPUTS_UNLOCKED:
10131 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10133 case BUILT_IN_STRSTR:
10134 return fold_builtin_strstr (loc, arg0, arg1, type);
10136 case BUILT_IN_STRCAT:
10137 return fold_builtin_strcat (loc, arg0, arg1);
10139 case BUILT_IN_STRSPN:
10140 return fold_builtin_strspn (loc, arg0, arg1);
10142 case BUILT_IN_STRCSPN:
10143 return fold_builtin_strcspn (loc, arg0, arg1);
10145 case BUILT_IN_STRCHR:
10146 case BUILT_IN_INDEX:
10147 return fold_builtin_strchr (loc, arg0, arg1, type);
10149 case BUILT_IN_STRRCHR:
10150 case BUILT_IN_RINDEX:
10151 return fold_builtin_strrchr (loc, arg0, arg1, type);
10153 case BUILT_IN_STRCPY:
10154 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10156 case BUILT_IN_STPCPY:
10157 if (ignore)
10159 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10160 if (!fn)
10161 break;
10163 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10165 else
10166 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10167 break;
10169 case BUILT_IN_STRCMP:
10170 return fold_builtin_strcmp (loc, arg0, arg1);
10172 case BUILT_IN_STRPBRK:
10173 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10175 case BUILT_IN_EXPECT:
10176 return fold_builtin_expect (loc, arg0, arg1);
10178 CASE_FLT_FN (BUILT_IN_POW):
10179 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10181 CASE_FLT_FN (BUILT_IN_POWI):
10182 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10184 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10185 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10187 CASE_FLT_FN (BUILT_IN_FMIN):
10188 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10190 CASE_FLT_FN (BUILT_IN_FMAX):
10191 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10193 case BUILT_IN_ISGREATER:
10194 return fold_builtin_unordered_cmp (loc, fndecl,
10195 arg0, arg1, UNLE_EXPR, LE_EXPR);
10196 case BUILT_IN_ISGREATEREQUAL:
10197 return fold_builtin_unordered_cmp (loc, fndecl,
10198 arg0, arg1, UNLT_EXPR, LT_EXPR);
10199 case BUILT_IN_ISLESS:
10200 return fold_builtin_unordered_cmp (loc, fndecl,
10201 arg0, arg1, UNGE_EXPR, GE_EXPR);
10202 case BUILT_IN_ISLESSEQUAL:
10203 return fold_builtin_unordered_cmp (loc, fndecl,
10204 arg0, arg1, UNGT_EXPR, GT_EXPR);
10205 case BUILT_IN_ISLESSGREATER:
10206 return fold_builtin_unordered_cmp (loc, fndecl,
10207 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10208 case BUILT_IN_ISUNORDERED:
10209 return fold_builtin_unordered_cmp (loc, fndecl,
10210 arg0, arg1, UNORDERED_EXPR,
10211 NOP_EXPR);
10213 /* We do the folding for va_start in the expander. */
10214 case BUILT_IN_VA_START:
10215 break;
10217 case BUILT_IN_SPRINTF:
10218 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10220 case BUILT_IN_OBJECT_SIZE:
10221 return fold_builtin_object_size (arg0, arg1);
10223 case BUILT_IN_PRINTF:
10224 case BUILT_IN_PRINTF_UNLOCKED:
10225 case BUILT_IN_VPRINTF:
10226 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10228 case BUILT_IN_PRINTF_CHK:
10229 case BUILT_IN_VPRINTF_CHK:
10230 if (!validate_arg (arg0, INTEGER_TYPE)
10231 || TREE_SIDE_EFFECTS (arg0))
10232 return NULL_TREE;
10233 else
10234 return fold_builtin_printf (loc, fndecl,
10235 arg1, NULL_TREE, ignore, fcode);
10236 break;
10238 case BUILT_IN_FPRINTF:
10239 case BUILT_IN_FPRINTF_UNLOCKED:
10240 case BUILT_IN_VFPRINTF:
10241 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10242 ignore, fcode);
10244 default:
10245 break;
10247 return NULL_TREE;
10250 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10251 and ARG2. IGNORE is true if the result of the function call is ignored.
10252 This function returns NULL_TREE if no simplification was possible. */
10254 static tree
10255 fold_builtin_3 (location_t loc, tree fndecl,
10256 tree arg0, tree arg1, tree arg2, bool ignore)
10258 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10259 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10260 switch (fcode)
10263 CASE_FLT_FN (BUILT_IN_SINCOS):
10264 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10266 CASE_FLT_FN (BUILT_IN_FMA):
10267 if (validate_arg (arg0, REAL_TYPE)
10268 && validate_arg (arg1, REAL_TYPE)
10269 && validate_arg (arg2, REAL_TYPE))
10270 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10271 break;
10273 CASE_FLT_FN (BUILT_IN_REMQUO):
10274 if (validate_arg (arg0, REAL_TYPE)
10275 && validate_arg (arg1, REAL_TYPE)
10276 && validate_arg (arg2, POINTER_TYPE))
10277 return do_mpfr_remquo (arg0, arg1, arg2);
10278 break;
10280 case BUILT_IN_MEMSET:
10281 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
/* bcopy (src, dst, len) is memmove (dst, src, len): note the swapped
   first two operands below.  */
10283 case BUILT_IN_BCOPY:
10284 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10285 void_type_node, true, /*endp=*/3);
10287 case BUILT_IN_MEMCPY:
10288 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10289 type, ignore, /*endp=*/0);
10291 case BUILT_IN_MEMPCPY:
10292 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10293 type, ignore, /*endp=*/1);
10295 case BUILT_IN_MEMMOVE:
10296 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10297 type, ignore, /*endp=*/3);
10299 case BUILT_IN_STRNCAT:
10300 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10302 case BUILT_IN_STRNCPY:
10303 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10305 case BUILT_IN_STRNCMP:
10306 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10308 case BUILT_IN_MEMCHR:
10309 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10311 case BUILT_IN_BCMP:
10312 case BUILT_IN_MEMCMP:
10313 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10315 case BUILT_IN_SPRINTF:
10316 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10318 case BUILT_IN_STRCPY_CHK:
10319 case BUILT_IN_STPCPY_CHK:
10320 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10321 ignore, fcode);
10323 case BUILT_IN_STRCAT_CHK:
10324 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10326 case BUILT_IN_PRINTF_CHK:
10327 case BUILT_IN_VPRINTF_CHK:
/* Only fold when the flag argument is a side-effect-free integer;
   otherwise dropping it would change the program's behavior.  */
10328 if (!validate_arg (arg0, INTEGER_TYPE)
10329 || TREE_SIDE_EFFECTS (arg0))
10330 return NULL_TREE;
10331 else
10332 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10333 break;
10335 case BUILT_IN_FPRINTF:
10336 case BUILT_IN_FPRINTF_UNLOCKED:
10337 case BUILT_IN_VFPRINTF:
10338 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10339 ignore, fcode);
10341 case BUILT_IN_FPRINTF_CHK:
10342 case BUILT_IN_VFPRINTF_CHK:
10343 if (!validate_arg (arg1, INTEGER_TYPE)
10344 || TREE_SIDE_EFFECTS (arg1))
10345 return NULL_TREE;
10346 else
10347 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10348 ignore, fcode);
10350 default:
10351 break;
10353 return NULL_TREE;
10356 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10357 ARG2, and ARG3. IGNORE is true if the result of the function call is
10358 ignored. This function returns NULL_TREE if no simplification was
10359 possible. */
10361 static tree
10362 fold_builtin_4 (location_t loc, tree fndecl,
10363 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10365 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10367 switch (fcode)
10369 case BUILT_IN_MEMCPY_CHK:
10370 case BUILT_IN_MEMPCPY_CHK:
10371 case BUILT_IN_MEMMOVE_CHK:
10372 case BUILT_IN_MEMSET_CHK:
10373 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10374 NULL_TREE, ignore,
10375 DECL_FUNCTION_CODE (fndecl))
10377 case BUILT_IN_STRNCPY_CHK:
10378 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10380 case BUILT_IN_STRNCAT_CHK:
10381 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10383 case BUILT_IN_FPRINTF_CHK:
10384 case BUILT_IN_VFPRINTF_CHK:
/* Only fold when the flag argument is a side-effect-free integer;
   otherwise dropping it would change the program's behavior.  */
10385 if (!validate_arg (arg1, INTEGER_TYPE)
10386 || TREE_SIDE_EFFECTS (arg1))
10387 return NULL_TREE;
10388 else
10389 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10390 ignore, fcode);
10391 break;
10393 default:
10394 break;
10396 return NULL_TREE;
10399 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10400 arguments, where NARGS <= 4. IGNORE is true if the result of the
10401 function call is ignored. This function returns NULL_TREE if no
10402 simplification was possible. Note that this only folds builtins with
10403 fixed argument patterns. Foldings that do varargs-to-varargs
10404 transformations, or that match calls with more than 4 arguments,
10405 need to be handled with fold_builtin_varargs instead. */
10407 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10409 static tree
10410 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10412 tree ret = NULL_TREE;
10414 switch (nargs)
10416 case 0:
10417 ret = fold_builtin_0 (loc, fndecl, ignore);
10418 break;
10419 case 1:
10420 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10421 break;
10422 case 2:
10423 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10424 break;
10425 case 3:
10426 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10427 break;
10428 case 4:
10429 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10430 ignore);
10431 break;
10432 default:
10433 break;
10435 if (ret)
/* Wrap the folded result in a NOP_EXPR flagged TREE_NO_WARNING so that
   replacing the call does not trigger spurious diagnostics.  */
10437 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10438 SET_EXPR_LOCATION (ret, loc);
10439 TREE_NO_WARNING (ret) = 1;
10440 return ret;
10442 return NULL_TREE;
10445 /* Builtins with folding operations that operate on "..." arguments
10446 need special handling; we need to store the arguments in a convenient
10447 data structure before attempting any folding. Fortunately there are
10448 only a few builtins that fall into this category. FNDECL is the
10449 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10450 result of the function call is ignored. */
10452 static tree
10453 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10454 bool ignore ATTRIBUTE_UNUSED)
10456 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10457 tree ret = NULL_TREE;
10459 switch (fcode)
10461 case BUILT_IN_SPRINTF_CHK:
10462 case BUILT_IN_VSPRINTF_CHK:
10463 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10464 break;
10466 case BUILT_IN_SNPRINTF_CHK:
10467 case BUILT_IN_VSNPRINTF_CHK:
10468 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10469 break;
10471 case BUILT_IN_FPCLASSIFY:
10472 ret = fold_builtin_fpclassify (loc, exp);
10473 break;
10475 default:
10476 break;
10478 if (ret)
/* Same wrapping as in fold_builtin_n: a no-warning NOP_EXPR keeps the
   removal of the call from triggering spurious diagnostics.  */
10480 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10481 SET_EXPR_LOCATION (ret, loc);
10482 TREE_NO_WARNING (ret) = 1;
10483 return ret;
10485 return NULL_TREE;
10488 /* Return true if FNDECL shouldn't be folded right now.
10489 If a built-in function has an inline attribute always_inline
10490 wrapper, defer folding it after always_inline functions have
10491 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10492 might not be performed. */
10494 static bool
10495 avoid_folding_inline_builtin (tree fndecl)
/* All conditions must hold: FNDECL is a declared-inline builtin that
   disregards inline limits, carries the always_inline attribute, and the
   current function has not yet had always_inline bodies inlined.  */
10497 return (DECL_DECLARED_INLINE_P (fndecl)
10498 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10499 && cfun
10500 && !cfun->always_inline_functions_inlined
10501 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10504 /* A wrapper function for builtin folding that prevents warnings for
10505 "statement without effect" and the like, caused by removing the
10506 call node earlier than the warning is generated. */
10508 tree
10509 fold_call_expr (location_t loc, tree exp, bool ignore)
10511 tree ret = NULL_TREE;
10512 tree fndecl = get_callee_fndecl (exp);
10513 if (fndecl
10514 && TREE_CODE (fndecl) == FUNCTION_DECL
10515 && DECL_BUILT_IN (fndecl)
10516 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10517 yet. Defer folding until we see all the arguments
10518 (after inlining). */
10519 && !CALL_EXPR_VA_ARG_PACK (exp))
10521 int nargs = call_expr_nargs (exp);
10523 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10524 instead last argument is __builtin_va_arg_pack (). Defer folding
10525 even in that case, until arguments are finalized. */
10526 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10528 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10529 if (fndecl2
10530 && TREE_CODE (fndecl2) == FUNCTION_DECL
10531 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10532 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10533 return NULL_TREE;
10536 if (avoid_folding_inline_builtin (fndecl))
10537 return NULL_TREE;
10539 /* FIXME: Don't use a list in this interface. */
10540 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10541 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10542 else
/* Try the fixed-arity folders first, then fall back to the
   varargs folder.  */
10544 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10546 tree *args = CALL_EXPR_ARGP (exp);
10547 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10549 if (!ret)
10550 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10551 if (ret)
10552 return ret;
10555 return NULL_TREE;
10558 /* Conveniently construct a function call expression. FNDECL names the
10559 function to be called and ARGLIST is a TREE_LIST of arguments. */
10561 tree
10562 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10564 tree fntype = TREE_TYPE (fndecl);
10565 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10566 int n = list_length (arglist);
10567 tree *argarray = (tree *) alloca (n * sizeof (tree));
10568 int i;
/* Flatten the TREE_LIST into a stack-allocated array for the
   array-based folder.  */
10570 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10571 argarray[i] = TREE_VALUE (arglist);
10572 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10575 /* Conveniently construct a function call expression. FNDECL names the
10576 function to be called, N is the number of arguments, and the "..."
10577 parameters are the argument expressions. */
10579 tree
10580 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10582 va_list ap;
10583 tree fntype = TREE_TYPE (fndecl);
10584 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10585 tree *argarray = (tree *) alloca (n * sizeof (tree));
10586 int i;
/* Collect the N variadic tree arguments into a stack array.  */
10588 va_start (ap, n);
10589 for (i = 0; i < n; i++)
10590 argarray[i] = va_arg (ap, tree);
10591 va_end (ap);
10592 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10595 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10596 N arguments are passed in the array ARGARRAY. */
10598 tree
10599 fold_builtin_call_array (location_t loc, tree type,
10600 tree fn,
10601 int n,
10602 tree *argarray)
10604 tree ret = NULL_TREE;
10605 int i;
10606 tree exp;
10608 if (TREE_CODE (fn) == ADDR_EXPR)
10610 tree fndecl = TREE_OPERAND (fn, 0);
10611 if (TREE_CODE (fndecl) == FUNCTION_DECL
10612 && DECL_BUILT_IN (fndecl))
10614 /* If last argument is __builtin_va_arg_pack (), arguments to this
10615 function are not finalized yet. Defer folding until they are. */
10616 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10618 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10619 if (fndecl2
10620 && TREE_CODE (fndecl2) == FUNCTION_DECL
10621 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10622 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10623 return build_call_array_loc (loc, type, fn, n, argarray);
10625 if (avoid_folding_inline_builtin (fndecl))
10626 return build_call_array_loc (loc, type, fn, n, argarray);
10627 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
/* The target (machine-specific) builtin hook still takes a
   TREE_LIST, so build one from the array.  */
10629 tree arglist = NULL_TREE;
10630 for (i = n - 1; i >= 0; i--)
10631 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10632 ret = targetm.fold_builtin (fndecl, arglist, false);
10633 if (ret)
10634 return ret;
10635 return build_call_array_loc (loc, type, fn, n, argarray);
10637 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10639 /* First try the transformations that don't require consing up
10640 an exp. */
10641 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10642 if (ret)
10643 return ret;
10646 /* If we got this far, we need to build an exp. */
10647 exp = build_call_array_loc (loc, type, fn, n, argarray);
10648 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10649 return ret ? ret : exp;
10653 return build_call_array_loc (loc, type, fn, n, argarray);
10656 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10657 along with N new arguments specified as the "..." parameters. SKIP
10658 is the number of arguments in EXP to be omitted. This function is used
10659 to do varargs-to-varargs transformations. */
10661 static tree
10662 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10664 int oldnargs = call_expr_nargs (exp);
10665 int nargs = oldnargs - skip + n;
10666 tree fntype = TREE_TYPE (fndecl);
10667 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10668 tree *buffer;
10670 if (n > 0)
10672 int i, j;
10673 va_list ap;
10675 buffer = XALLOCAVEC (tree, nargs);
10676 va_start (ap, n);
10677 for (i = 0; i < n; i++)
10678 buffer[i] = va_arg (ap, tree);
10679 va_end (ap);
10680 for (j = skip; j < oldnargs; j++, i++)
10681 buffer[i] = CALL_EXPR_ARG (exp, j);
10683 else
/* No new arguments: reuse EXP's argument vector in place,
   offset by SKIP, instead of copying.  */
10684 buffer = CALL_EXPR_ARGP (exp) + skip;
10686 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10689 /* Validate a single argument ARG against a tree code CODE representing
10690 a type. */
10692 static bool
10693 validate_arg (const_tree arg, enum tree_code code)
10695 if (!arg)
10696 return false;
/* POINTER_TYPE and INTEGER_TYPE are treated loosely: any pointer-ish
   or integral type is accepted, not just the exact tree code.  */
10697 else if (code == POINTER_TYPE)
10698 return POINTER_TYPE_P (TREE_TYPE (arg));
10699 else if (code == INTEGER_TYPE)
10700 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10701 return code == TREE_CODE (TREE_TYPE (arg));
10704 /* This function validates the types of a function call argument list
10705 against a specified list of tree_codes. If the last specifier is a 0,
10706 that represents an ellipses, otherwise the last specifier must be a
10707 VOID_TYPE.
10709 This is the GIMPLE version of validate_arglist. Eventually we want to
10710 completely convert builtins.c to work from GIMPLEs and the tree based
10711 validate_arglist will then be removed. */
10713 bool
10714 validate_gimple_arglist (const_gimple call, ...)
10716 enum tree_code code;
10717 bool res = 0;
10718 va_list ap;
10719 const_tree arg;
10720 size_t i;
10722 va_start (ap, call);
10723 i = 0;
/* RES stays false unless we hit an ellipsis (0) or a satisfied
   VOID_TYPE endlink below.  */
10727 code = (enum tree_code) va_arg (ap, int);
10728 switch (code)
10730 case 0:
10731 /* This signifies an ellipses, any further arguments are all ok. */
10732 res = true;
10733 goto end;
10734 case VOID_TYPE:
10735 /* This signifies an endlink, if no arguments remain, return
10736 true, otherwise return false. */
10737 res = (i == gimple_call_num_args (call));
10738 goto end;
10739 default:
10740 /* If no parameters remain or the parameter's code does not
10741 match the specified code, return false. Otherwise continue
10742 checking any remaining arguments. */
10743 arg = gimple_call_arg (call, i++);
10744 if (!validate_arg (arg, code))
10745 goto end;
10746 break;
10749 while (1);
10751 /* We need gotos here since we can only have one VA_CLOSE in a
10752 function. */
10753 end: ;
10754 va_end (ap);
10756 return res;
10759 /* This function validates the types of a function call argument list
10760 against a specified list of tree_codes. If the last specifier is a 0,
10761 that represents an ellipses, otherwise the last specifier must be a
10762 VOID_TYPE. */
10764 bool
10765 validate_arglist (const_tree callexpr, ...)
10767 enum tree_code code;
10768 bool res = 0;
10769 va_list ap;
10770 const_call_expr_arg_iterator iter;
10771 const_tree arg;
10773 va_start (ap, callexpr);
10774 init_const_call_expr_arg_iterator (callexpr, &iter);
/* RES stays false unless we hit an ellipsis (0) or a satisfied
   VOID_TYPE endlink below.  */
10778 code = (enum tree_code) va_arg (ap, int);
10779 switch (code)
10781 case 0:
10782 /* This signifies an ellipses, any further arguments are all ok. */
10783 res = true;
10784 goto end;
10785 case VOID_TYPE:
10786 /* This signifies an endlink, if no arguments remain, return
10787 true, otherwise return false. */
10788 res = !more_const_call_expr_args_p (&iter);
10789 goto end;
10790 default:
10791 /* If no parameters remain or the parameter's code does not
10792 match the specified code, return false. Otherwise continue
10793 checking any remaining arguments. */
10794 arg = next_const_call_expr_arg (&iter);
10795 if (!validate_arg (arg, code))
10796 goto end;
10797 break;
10800 while (1);
10802 /* We need gotos here since we can only have one VA_CLOSE in a
10803 function. */
10804 end: ;
10805 va_end (ap);
10807 return res;
10810 /* Default target-specific builtin expander that does nothing. */
10813 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10814 rtx target ATTRIBUTE_UNUSED,
10815 rtx subtarget ATTRIBUTE_UNUSED,
10816 enum machine_mode mode ATTRIBUTE_UNUSED,
10817 int ignore ATTRIBUTE_UNUSED)
/* NULL_RTX indicates that no target-specific expansion was performed.  */
10819 return NULL_RTX;
10822 /* Returns true if EXP represents data that would potentially reside
10823 in a readonly section. */
10825 static bool
10826 readonly_data_expr (tree exp)
10828 STRIP_NOPS (exp);
/* Only the address of an object can name read-only data.  */
10830 if (TREE_CODE (exp) != ADDR_EXPR)
10831 return false;
10833 exp = get_base_address (TREE_OPERAND (exp, 0));
10834 if (!exp)
10835 return false;
10837 /* Make sure we call decl_readonly_section only for trees it
10838 can handle (since it returns true for everything it doesn't
10839 understand). */
10840 if (TREE_CODE (exp) == STRING_CST
10841 || TREE_CODE (exp) == CONSTRUCTOR
10842 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10843 return decl_readonly_section (exp, 0);
10844 else
10845 return false;
10848 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10849 to the call, and TYPE is its return type.
10851 Return NULL_TREE if no simplification was possible, otherwise return the
10852 simplified form of the call as a tree.
10854 The simplified form may be a constant or other expression which
10855 computes the same value, but in a more efficient manner (including
10856 calls to other builtin functions).
10858 The call may contain arguments which need to be evaluated, but
10859 which are not useful to determine the result of the call. In
10860 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10861 COMPOUND_EXPR will be an argument which must be evaluated.
10862 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10863 COMPOUND_EXPR in the chain will contain the tree for the simplified
10864 form of the builtin function call. */
10866 static tree
10867 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10869 if (!validate_arg (s1, POINTER_TYPE)
10870 || !validate_arg (s2, POINTER_TYPE))
10871 return NULL_TREE;
10872 else
10874 tree fn;
10875 const char *p1, *p2;
10877 p2 = c_getstr (s2);
10878 if (p2 == NULL)
10879 return NULL_TREE;
10881 p1 = c_getstr (s1);
10882 if (p1 != NULL)
/* Both strings are compile-time constants: do the search now.  */
10884 const char *r = strstr (p1, p2);
10885 tree tem;
10887 if (r == NULL)
/* Not found: fold to a null pointer constant.  */
10888 return build_int_cst (TREE_TYPE (s1), 0);
10890 /* Return an offset into the constant string argument. */
10891 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10892 s1, size_int (r - p1));
10893 return fold_convert_loc (loc, type, tem);
10896 /* The argument is const char *, and the result is char *, so we need
10897 a type conversion here to avoid a warning. */
10898 if (p2[0] == '\0')
10899 return fold_convert_loc (loc, type, s1);
10901 if (p2[1] != '\0')
10902 return NULL_TREE;
10904 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10905 if (!fn)
10906 return NULL_TREE;
10908 /* New argument list transforming strstr(s1, s2) to
10909 strchr(s1, s2[0]). */
10910 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
10914 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10915 the call, and TYPE is its return type.
10917 Return NULL_TREE if no simplification was possible, otherwise return the
10918 simplified form of the call as a tree.
10920 The simplified form may be a constant or other expression which
10921 computes the same value, but in a more efficient manner (including
10922 calls to other builtin functions).
10924 The call may contain arguments which need to be evaluated, but
10925 which are not useful to determine the result of the call. In
10926 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10927 COMPOUND_EXPR will be an argument which must be evaluated.
10928 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10929 COMPOUND_EXPR in the chain will contain the tree for the simplified
10930 form of the builtin function call. */
10932 static tree
10933 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10935 if (!validate_arg (s1, POINTER_TYPE)
10936 || !validate_arg (s2, INTEGER_TYPE))
10937 return NULL_TREE;
10938 else
10940 const char *p1;
10942 if (TREE_CODE (s2) != INTEGER_CST)
10943 return NULL_TREE;
10945 p1 = c_getstr (s1);
10946 if (p1 != NULL)
/* Both the string and the character are known: do the search at
   compile time.  */
10948 char c;
10949 const char *r;
10950 tree tem;
10952 if (target_char_cast (s2, &c))
10953 return NULL_TREE;
10955 r = strchr (p1, c);
10957 if (r == NULL)
10958 return build_int_cst (TREE_TYPE (s1), 0);
10960 /* Return an offset into the constant string argument. */
10961 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10962 s1, size_int (r - p1));
10963 return fold_convert_loc (loc, type, tem);
10965 return NULL_TREE;
10969 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10970 the call, and TYPE is its return type.
10972 Return NULL_TREE if no simplification was possible, otherwise return the
10973 simplified form of the call as a tree.
10975 The simplified form may be a constant or other expression which
10976 computes the same value, but in a more efficient manner (including
10977 calls to other builtin functions).
10979 The call may contain arguments which need to be evaluated, but
10980 which are not useful to determine the result of the call. In
10981 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10982 COMPOUND_EXPR will be an argument which must be evaluated.
10983 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10984 COMPOUND_EXPR in the chain will contain the tree for the simplified
10985 form of the builtin function call. */
10987 static tree
10988 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10990 if (!validate_arg (s1, POINTER_TYPE)
10991 || !validate_arg (s2, INTEGER_TYPE))
10992 return NULL_TREE;
10993 else
10995 tree fn;
10996 const char *p1;
10998 if (TREE_CODE (s2) != INTEGER_CST)
10999 return NULL_TREE;
11001 p1 = c_getstr (s1);
11002 if (p1 != NULL)
11004 char c;
11005 const char *r;
11006 tree tem;
11008 if (target_char_cast (s2, &c))
11009 return NULL_TREE;
11011 r = strrchr (p1, c);
11013 if (r == NULL)
11014 return build_int_cst (TREE_TYPE (s1), 0);
11016 /* Return an offset into the constant string argument. */
11017 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11018 s1, size_int (r - p1));
11019 return fold_convert_loc (loc, type, tem);
/* S1 is not a constant string: only the search-for-'\0' case below
   can be simplified.  */
11022 if (! integer_zerop (s2))
11023 return NULL_TREE;
11025 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11026 if (!fn)
11027 return NULL_TREE;
11029 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11030 return build_call_expr_loc (loc, fn, 2, s1, s2);
11034 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11035 to the call, and TYPE is its return type.
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11052 static tree
11053 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11055 if (!validate_arg (s1, POINTER_TYPE)
11056 || !validate_arg (s2, POINTER_TYPE))
11057 return NULL_TREE;
11058 else
11060 tree fn;
11061 const char *p1, *p2;
11063 p2 = c_getstr (s2);
11064 if (p2 == NULL)
11065 return NULL_TREE;
11067 p1 = c_getstr (s1);
11068 if (p1 != NULL)
/* Both strings are compile-time constants: do the search now.  */
11070 const char *r = strpbrk (p1, p2);
11071 tree tem;
11073 if (r == NULL)
11074 return build_int_cst (TREE_TYPE (s1), 0);
11076 /* Return an offset into the constant string argument. */
11077 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11078 s1, size_int (r - p1));
11079 return fold_convert_loc (loc, type, tem);
11082 if (p2[0] == '\0')
11083 /* strpbrk(x, "") == NULL.
11084 Evaluate and ignore s1 in case it had side-effects. */
11085 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11087 if (p2[1] != '\0')
11088 return NULL_TREE; /* Really call strpbrk. */
/* Single-character search set: strpbrk (s1, "c") == strchr (s1, 'c').  */
11090 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11091 if (!fn)
11092 return NULL_TREE;
11094 /* New argument list transforming strpbrk(s1, s2) to
11095 strchr(s1, s2[0]). */
11096 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11100 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11101 to the call.
11103 Return NULL_TREE if no simplification was possible, otherwise return the
11104 simplified form of the call as a tree.
11106 The simplified form may be a constant or other expression which
11107 computes the same value, but in a more efficient manner (including
11108 calls to other builtin functions).
11110 The call may contain arguments which need to be evaluated, but
11111 which are not useful to determine the result of the call. In
11112 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11113 COMPOUND_EXPR will be an argument which must be evaluated.
11114 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11115 COMPOUND_EXPR in the chain will contain the tree for the simplified
11116 form of the builtin function call. */
11118 static tree
11119 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11121 if (!validate_arg (dst, POINTER_TYPE)
11122 || !validate_arg (src, POINTER_TYPE))
11123 return NULL_TREE;
11124 else
11126 const char *p = c_getstr (src);
11128 /* If the string length is zero, return the dst parameter. */
11129 if (p && *p == '\0')
11130 return dst;
11132 if (optimize_insn_for_speed_p ())
11134 /* See if we can store by pieces into (dst + strlen(dst)). */
11135 tree newdst, call;
11136 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11137 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11139 if (!strlen_fn || !strcpy_fn)
11140 return NULL_TREE;
11142 /* If we don't have a movstr we don't want to emit an strcpy
11143 call. We have to do that if the length of the source string
11144 isn't computable (in that case we can use memcpy probably
11145 later expanding to a sequence of mov instructions). If we
11146 have movstr instructions we can emit strcpy calls. */
11147 if (!HAVE_movstr)
11149 tree len = c_strlen (src, 1);
11150 if (! len || TREE_SIDE_EFFECTS (len))
11151 return NULL_TREE;
11154 /* Stabilize the argument list. */
11155 dst = builtin_save_expr (dst);
11157 /* Create strlen (dst). */
11158 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11159 /* Create (dst p+ strlen (dst)). */
11161 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11162 TREE_TYPE (dst), dst, newdst);
11163 newdst = builtin_save_expr (newdst);
/* Evaluate the strcpy call for its side effect, then yield DST as the
   overall value, matching strcat's return value.  */
11165 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11166 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11168 return NULL_TREE;
11172 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11173 arguments to the call.
11175 Return NULL_TREE if no simplification was possible, otherwise return the
11176 simplified form of the call as a tree.
11178 The simplified form may be a constant or other expression which
11179 computes the same value, but in a more efficient manner (including
11180 calls to other builtin functions).
11182 The call may contain arguments which need to be evaluated, but
11183 which are not useful to determine the result of the call. In
11184 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11185 COMPOUND_EXPR will be an argument which must be evaluated.
11186 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11187 COMPOUND_EXPR in the chain will contain the tree for the simplified
11188 form of the builtin function call. */
11190 static tree
11191 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11193 if (!validate_arg (dst, POINTER_TYPE)
11194 || !validate_arg (src, POINTER_TYPE)
11195 || !validate_arg (len, INTEGER_TYPE))
11196 return NULL_TREE;
11197 else
/* P is non-NULL only when SRC is a compile-time string constant.  */
11199 const char *p = c_getstr (src);
11201 /* If the requested length is zero, or the src parameter string
11202 length is zero, return the dst parameter. */
11203 if (integer_zerop (len) || (p && *p == '\0'))
11204 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11206 /* If the requested len is greater than or equal to the string
11207 length, call strcat. */
11208 if (TREE_CODE (len) == INTEGER_CST && p
11209 && compare_tree_int (len, strlen (p)) >= 0)
11211 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11213 /* If the replacement _DECL isn't initialized, don't do the
11214 transformation. */
11215 if (!fn)
11216 return NULL_TREE;
11218 return build_call_expr_loc (loc, fn, 2, dst, src);
11220 return NULL_TREE;
11224 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11225 to the call.
11227 Return NULL_TREE if no simplification was possible, otherwise return the
11228 simplified form of the call as a tree.
11230 The simplified form may be a constant or other expression which
11231 computes the same value, but in a more efficient manner (including
11232 calls to other builtin functions).
11234 The call may contain arguments which need to be evaluated, but
11235 which are not useful to determine the result of the call. In
11236 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11237 COMPOUND_EXPR will be an argument which must be evaluated.
11238 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11239 COMPOUND_EXPR in the chain will contain the tree for the simplified
11240 form of the builtin function call. */
11242 static tree
11243 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11245 if (!validate_arg (s1, POINTER_TYPE)
11246 || !validate_arg (s2, POINTER_TYPE))
11247 return NULL_TREE;
11248 else
11250 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11252 /* If both arguments are constants, evaluate at compile-time. */
11253 if (p1 && p2)
11255 const size_t r = strspn (p1, p2);
11256 return size_int (r);
/* If either argument is "", the result is zero. */
11260 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11261 /* Evaluate and ignore both arguments in case either one has
11262 side-effects. */
11263 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11264 s1, s2);
11265 return NULL_TREE;
11269 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11270 to the call.
11272 Return NULL_TREE if no simplification was possible, otherwise return the
11273 simplified form of the call as a tree.
11275 The simplified form may be a constant or other expression which
11276 computes the same value, but in a more efficient manner (including
11277 calls to other builtin functions).
11279 The call may contain arguments which need to be evaluated, but
11280 which are not useful to determine the result of the call. In
11281 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11282 COMPOUND_EXPR will be an argument which must be evaluated.
11283 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11284 COMPOUND_EXPR in the chain will contain the tree for the simplified
11285 form of the builtin function call. */
11287 static tree
11288 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11290 if (!validate_arg (s1, POINTER_TYPE)
11291 || !validate_arg (s2, POINTER_TYPE))
11292 return NULL_TREE;
11293 else
11295 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11297 /* If both arguments are constants, evaluate at compile-time. */
11298 if (p1 && p2)
11300 const size_t r = strcspn (p1, p2);
11301 return size_int (r);
/* If the first argument is "", the result is zero. */
11305 if (p1 && *p1 == '\0')
11307 /* Evaluate and ignore argument s2 in case it has
11308 side-effects. */
11309 return omit_one_operand_loc (loc, size_type_node,
11310 size_zero_node, s2);
11313 /* If the second argument is "", return __builtin_strlen(s1). */
11314 if (p2 && *p2 == '\0')
11316 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11318 /* If the replacement _DECL isn't initialized, don't do the
11319 transformation. */
11320 if (!fn)
11321 return NULL_TREE;
11323 return build_call_expr_loc (loc, fn, 1, s1);
11325 return NULL_TREE;
11329 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11330 to the call. IGNORE is true if the value returned
11331 by the builtin will be ignored. UNLOCKED is true is true if this
11332 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11333 the known length of the string. Return NULL_TREE if no simplification
11334 was possible. */
11336 tree
11337 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11338 bool ignore, bool unlocked, tree len)
11340 /* If we're using an unlocked function, assume the other unlocked
11341 functions exist explicitly. */
11342 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11343 : implicit_built_in_decls[BUILT_IN_FPUTC];
11344 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11345 : implicit_built_in_decls[BUILT_IN_FWRITE];
11347 /* If the return value is used, don't do the transformation. */
11348 if (!ignore)
11349 return NULL_TREE;
11351 /* Verify the arguments in the original call. */
11352 if (!validate_arg (arg0, POINTER_TYPE)
11353 || !validate_arg (arg1, POINTER_TYPE))
11354 return NULL_TREE;
11356 if (! len)
11357 len = c_strlen (arg0, 0);
11359 /* Get the length of the string passed to fputs. If the length
11360 can't be determined, punt. */
11361 if (!len
11362 || TREE_CODE (len) != INTEGER_CST)
11363 return NULL_TREE;
11365 switch (compare_tree_int (len, 1))
11367 case -1: /* length is 0, delete the call entirely . */
11368 return omit_one_operand_loc (loc, integer_type_node,
11369 integer_zero_node, arg1);;
11371 case 0: /* length is 1, call fputc. */
11373 const char *p = c_getstr (arg0);
11375 if (p != NULL)
11377 if (fn_fputc)
11378 return build_call_expr_loc (loc, fn_fputc, 2,
11379 build_int_cst (NULL_TREE, p[0]), arg1);
11380 else
11381 return NULL_TREE;
11384 /* FALLTHROUGH */
11385 case 1: /* length is greater than 1, call fwrite. */
11387 /* If optimizing for size keep fputs. */
11388 if (optimize_function_for_size_p (cfun))
11389 return NULL_TREE;
11390 /* New argument list transforming fputs(string, stream) to
11391 fwrite(string, 1, len, stream). */
11392 if (fn_fwrite)
11393 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11394 size_one_node, len, arg1);
11395 else
11396 return NULL_TREE;
11398 default:
11399 gcc_unreachable ();
11401 return NULL_TREE;
11404 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11405 produced. False otherwise. This is done so that we don't output the error
11406 or warning twice or three times. */
11408 bool
11409 fold_builtin_next_arg (tree exp, bool va_start_p)
11411 tree fntype = TREE_TYPE (current_function_decl);
11412 int nargs = call_expr_nargs (exp);
11413 tree arg;
11415 if (TYPE_ARG_TYPES (fntype) == 0
11416 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11417 == void_type_node))
11419 error ("%<va_start%> used in function with fixed args");
11420 return true;
11423 if (va_start_p)
11425 if (va_start_p && (nargs != 2))
11427 error ("wrong number of arguments to function %<va_start%>");
11428 return true;
11430 arg = CALL_EXPR_ARG (exp, 1);
11432 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11433 when we checked the arguments and if needed issued a warning. */
11434 else
11436 if (nargs == 0)
11438 /* Evidently an out of date version of <stdarg.h>; can't validate
11439 va_start's second argument, but can still work as intended. */
11440 warning (0, "%<__builtin_next_arg%> called without an argument");
11441 return true;
11443 else if (nargs > 1)
11445 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11446 return true;
11448 arg = CALL_EXPR_ARG (exp, 0);
11451 if (TREE_CODE (arg) == SSA_NAME)
11452 arg = SSA_NAME_VAR (arg);
11454 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11455 or __builtin_next_arg (0) the first time we see it, after checking
11456 the arguments and if needed issuing a warning. */
11457 if (!integer_zerop (arg))
11459 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11461 /* Strip off all nops for the sake of the comparison. This
11462 is not quite the same as STRIP_NOPS. It does more.
11463 We must also strip off INDIRECT_EXPR for C++ reference
11464 parameters. */
11465 while (CONVERT_EXPR_P (arg)
11466 || TREE_CODE (arg) == INDIRECT_REF)
11467 arg = TREE_OPERAND (arg, 0);
11468 if (arg != last_parm)
11470 /* FIXME: Sometimes with the tree optimizers we can get the
11471 not the last argument even though the user used the last
11472 argument. We just warn and set the arg to be the last
11473 argument so that we will get wrong-code because of
11474 it. */
11475 warning (0, "second parameter of %<va_start%> not last named argument");
11478 /* Undefined by C99 7.15.1.4p4 (va_start):
11479 "If the parameter parmN is declared with the register storage
11480 class, with a function or array type, or with a type that is
11481 not compatible with the type that results after application of
11482 the default argument promotions, the behavior is undefined."
11484 else if (DECL_REGISTER (arg))
11485 warning (0, "undefined behaviour when second parameter of "
11486 "%<va_start%> is declared with %<register%> storage");
11488 /* We want to verify the second parameter just once before the tree
11489 optimizers are run and then avoid keeping it in the tree,
11490 as otherwise we could warn even for correct code like:
11491 void foo (int i, ...)
11492 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11493 if (va_start_p)
11494 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11495 else
11496 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11498 return false;
11502 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11503 ORIG may be null if this is a 2-argument call. We don't attempt to
11504 simplify calls with more than 3 arguments.
11506 Return NULL_TREE if no simplification was possible, otherwise return the
11507 simplified form of the call as a tree. If IGNORED is true, it means that
11508 the caller does not use the returned value of the function. */
11510 static tree
11511 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11512 tree orig, int ignored)
11514 tree call, retval;
11515 const char *fmt_str = NULL;
11517 /* Verify the required arguments in the original call. We deal with two
11518 types of sprintf() calls: 'sprintf (str, fmt)' and
11519 'sprintf (dest, "%s", orig)'. */
11520 if (!validate_arg (dest, POINTER_TYPE)
11521 || !validate_arg (fmt, POINTER_TYPE))
11522 return NULL_TREE;
11523 if (orig && !validate_arg (orig, POINTER_TYPE))
11524 return NULL_TREE;
11526 /* Check whether the format is a literal string constant. */
11527 fmt_str = c_getstr (fmt);
11528 if (fmt_str == NULL)
11529 return NULL_TREE;
11531 call = NULL_TREE;
11532 retval = NULL_TREE;
11534 if (!init_target_chars ())
11535 return NULL_TREE;
11537 /* If the format doesn't contain % args or %%, use strcpy. */
11538 if (strchr (fmt_str, target_percent) == NULL)
11540 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11542 if (!fn)
11543 return NULL_TREE;
11545 /* Don't optimize sprintf (buf, "abc", ptr++). */
11546 if (orig)
11547 return NULL_TREE;
11549 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11550 'format' is known to contain no % formats. */
11551 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11552 if (!ignored)
11553 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11556 /* If the format is "%s", use strcpy if the result isn't used. */
11557 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11559 tree fn;
11560 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11562 if (!fn)
11563 return NULL_TREE;
11565 /* Don't crash on sprintf (str1, "%s"). */
11566 if (!orig)
11567 return NULL_TREE;
11569 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11570 if (!ignored)
11572 retval = c_strlen (orig, 1);
11573 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11574 return NULL_TREE;
11576 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11579 if (call && retval)
11581 retval = fold_convert_loc
11582 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11583 retval);
11584 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11586 else
11587 return call;
11590 /* Expand a call EXP to __builtin_object_size. */
11593 expand_builtin_object_size (tree exp)
11595 tree ost;
11596 int object_size_type;
11597 tree fndecl = get_callee_fndecl (exp);
11599 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11601 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11602 exp, fndecl);
11603 expand_builtin_trap ();
11604 return const0_rtx;
11607 ost = CALL_EXPR_ARG (exp, 1);
11608 STRIP_NOPS (ost);
11610 if (TREE_CODE (ost) != INTEGER_CST
11611 || tree_int_cst_sgn (ost) < 0
11612 || compare_tree_int (ost, 3) > 0)
11614 error ("%Klast argument of %D is not integer constant between 0 and 3",
11615 exp, fndecl);
11616 expand_builtin_trap ();
11617 return const0_rtx;
11620 object_size_type = tree_low_cst (ost, 0);
11622 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11625 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11626 FCODE is the BUILT_IN_* to use.
11627 Return NULL_RTX if we failed; the caller should emit a normal call,
11628 otherwise try to get the result in TARGET, if convenient (and in
11629 mode MODE if that's convenient). */
11631 static rtx
11632 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11633 enum built_in_function fcode)
11635 tree dest, src, len, size;
11637 if (!validate_arglist (exp,
11638 POINTER_TYPE,
11639 fcode == BUILT_IN_MEMSET_CHK
11640 ? INTEGER_TYPE : POINTER_TYPE,
11641 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11642 return NULL_RTX;
11644 dest = CALL_EXPR_ARG (exp, 0);
11645 src = CALL_EXPR_ARG (exp, 1);
11646 len = CALL_EXPR_ARG (exp, 2);
11647 size = CALL_EXPR_ARG (exp, 3);
11649 if (! host_integerp (size, 1))
11650 return NULL_RTX;
11652 if (host_integerp (len, 1) || integer_all_onesp (size))
11654 tree fn;
11656 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11658 warning_at (tree_nonartificial_location (exp),
11659 0, "%Kcall to %D will always overflow destination buffer",
11660 exp, get_callee_fndecl (exp));
11661 return NULL_RTX;
11664 fn = NULL_TREE;
11665 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11666 mem{cpy,pcpy,move,set} is available. */
11667 switch (fcode)
11669 case BUILT_IN_MEMCPY_CHK:
11670 fn = built_in_decls[BUILT_IN_MEMCPY];
11671 break;
11672 case BUILT_IN_MEMPCPY_CHK:
11673 fn = built_in_decls[BUILT_IN_MEMPCPY];
11674 break;
11675 case BUILT_IN_MEMMOVE_CHK:
11676 fn = built_in_decls[BUILT_IN_MEMMOVE];
11677 break;
11678 case BUILT_IN_MEMSET_CHK:
11679 fn = built_in_decls[BUILT_IN_MEMSET];
11680 break;
11681 default:
11682 break;
11685 if (! fn)
11686 return NULL_RTX;
11688 fn = build_call_nofold (fn, 3, dest, src, len);
11689 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11690 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11691 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11693 else if (fcode == BUILT_IN_MEMSET_CHK)
11694 return NULL_RTX;
11695 else
11697 unsigned int dest_align
11698 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11700 /* If DEST is not a pointer type, call the normal function. */
11701 if (dest_align == 0)
11702 return NULL_RTX;
11704 /* If SRC and DEST are the same (and not volatile), do nothing. */
11705 if (operand_equal_p (src, dest, 0))
11707 tree expr;
11709 if (fcode != BUILT_IN_MEMPCPY_CHK)
11711 /* Evaluate and ignore LEN in case it has side-effects. */
11712 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11713 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11716 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11717 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11720 /* __memmove_chk special case. */
11721 if (fcode == BUILT_IN_MEMMOVE_CHK)
11723 unsigned int src_align
11724 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11726 if (src_align == 0)
11727 return NULL_RTX;
11729 /* If src is categorized for a readonly section we can use
11730 normal __memcpy_chk. */
11731 if (readonly_data_expr (src))
11733 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11734 if (!fn)
11735 return NULL_RTX;
11736 fn = build_call_nofold (fn, 4, dest, src, len, size);
11737 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11738 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11739 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11742 return NULL_RTX;
11746 /* Emit warning if a buffer overflow is detected at compile time. */
11748 static void
11749 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11751 int is_strlen = 0;
11752 tree len, size;
11753 location_t loc = tree_nonartificial_location (exp);
11755 switch (fcode)
11757 case BUILT_IN_STRCPY_CHK:
11758 case BUILT_IN_STPCPY_CHK:
11759 /* For __strcat_chk the warning will be emitted only if overflowing
11760 by at least strlen (dest) + 1 bytes. */
11761 case BUILT_IN_STRCAT_CHK:
11762 len = CALL_EXPR_ARG (exp, 1);
11763 size = CALL_EXPR_ARG (exp, 2);
11764 is_strlen = 1;
11765 break;
11766 case BUILT_IN_STRNCAT_CHK:
11767 case BUILT_IN_STRNCPY_CHK:
11768 len = CALL_EXPR_ARG (exp, 2);
11769 size = CALL_EXPR_ARG (exp, 3);
11770 break;
11771 case BUILT_IN_SNPRINTF_CHK:
11772 case BUILT_IN_VSNPRINTF_CHK:
11773 len = CALL_EXPR_ARG (exp, 1);
11774 size = CALL_EXPR_ARG (exp, 3);
11775 break;
11776 default:
11777 gcc_unreachable ();
11780 if (!len || !size)
11781 return;
11783 if (! host_integerp (size, 1) || integer_all_onesp (size))
11784 return;
11786 if (is_strlen)
11788 len = c_strlen (len, 1);
11789 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11790 return;
11792 else if (fcode == BUILT_IN_STRNCAT_CHK)
11794 tree src = CALL_EXPR_ARG (exp, 1);
11795 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11796 return;
11797 src = c_strlen (src, 1);
11798 if (! src || ! host_integerp (src, 1))
11800 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11801 exp, get_callee_fndecl (exp));
11802 return;
11804 else if (tree_int_cst_lt (src, size))
11805 return;
11807 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11808 return;
11810 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11811 exp, get_callee_fndecl (exp));
11814 /* Emit warning if a buffer overflow is detected at compile time
11815 in __sprintf_chk/__vsprintf_chk calls. */
11817 static void
11818 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11820 tree size, len, fmt;
11821 const char *fmt_str;
11822 int nargs = call_expr_nargs (exp);
11824 /* Verify the required arguments in the original call. */
11826 if (nargs < 4)
11827 return;
11828 size = CALL_EXPR_ARG (exp, 2);
11829 fmt = CALL_EXPR_ARG (exp, 3);
11831 if (! host_integerp (size, 1) || integer_all_onesp (size))
11832 return;
11834 /* Check whether the format is a literal string constant. */
11835 fmt_str = c_getstr (fmt);
11836 if (fmt_str == NULL)
11837 return;
11839 if (!init_target_chars ())
11840 return;
11842 /* If the format doesn't contain % args or %%, we know its size. */
11843 if (strchr (fmt_str, target_percent) == 0)
11844 len = build_int_cstu (size_type_node, strlen (fmt_str));
11845 /* If the format is "%s" and first ... argument is a string literal,
11846 we know it too. */
11847 else if (fcode == BUILT_IN_SPRINTF_CHK
11848 && strcmp (fmt_str, target_percent_s) == 0)
11850 tree arg;
11852 if (nargs < 5)
11853 return;
11854 arg = CALL_EXPR_ARG (exp, 4);
11855 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11856 return;
11858 len = c_strlen (arg, 1);
11859 if (!len || ! host_integerp (len, 1))
11860 return;
11862 else
11863 return;
11865 if (! tree_int_cst_lt (len, size))
11866 warning_at (tree_nonartificial_location (exp),
11867 0, "%Kcall to %D will always overflow destination buffer",
11868 exp, get_callee_fndecl (exp));
11871 /* Emit warning if a free is called with address of a variable. */
11873 static void
11874 maybe_emit_free_warning (tree exp)
11876 tree arg = CALL_EXPR_ARG (exp, 0);
11878 STRIP_NOPS (arg);
11879 if (TREE_CODE (arg) != ADDR_EXPR)
11880 return;
11882 arg = get_base_address (TREE_OPERAND (arg, 0));
11883 if (arg == NULL || INDIRECT_REF_P (arg))
11884 return;
11886 if (SSA_VAR_P (arg))
11887 warning_at (tree_nonartificial_location (exp),
11888 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11889 else
11890 warning_at (tree_nonartificial_location (exp),
11891 0, "%Kattempt to free a non-heap object", exp);
11894 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11895 if possible. */
11897 tree
11898 fold_builtin_object_size (tree ptr, tree ost)
11900 tree ret = NULL_TREE;
11901 int object_size_type;
11903 if (!validate_arg (ptr, POINTER_TYPE)
11904 || !validate_arg (ost, INTEGER_TYPE))
11905 return NULL_TREE;
11907 STRIP_NOPS (ost);
11909 if (TREE_CODE (ost) != INTEGER_CST
11910 || tree_int_cst_sgn (ost) < 0
11911 || compare_tree_int (ost, 3) > 0)
11912 return NULL_TREE;
11914 object_size_type = tree_low_cst (ost, 0);
11916 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11917 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11918 and (size_t) 0 for types 2 and 3. */
11919 if (TREE_SIDE_EFFECTS (ptr))
11920 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11922 if (TREE_CODE (ptr) == ADDR_EXPR)
11923 ret = build_int_cstu (size_type_node,
11924 compute_builtin_object_size (ptr, object_size_type));
11926 else if (TREE_CODE (ptr) == SSA_NAME)
11928 unsigned HOST_WIDE_INT bytes;
11930 /* If object size is not known yet, delay folding until
11931 later. Maybe subsequent passes will help determining
11932 it. */
11933 bytes = compute_builtin_object_size (ptr, object_size_type);
11934 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11935 ? -1 : 0))
11936 ret = build_int_cstu (size_type_node, bytes);
11939 if (ret)
11941 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11942 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11943 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11944 ret = NULL_TREE;
11947 return ret;
11950 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11951 DEST, SRC, LEN, and SIZE are the arguments to the call.
11952 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11953 code of the builtin. If MAXLEN is not NULL, it is maximum length
11954 passed as third argument. */
11956 tree
11957 fold_builtin_memory_chk (location_t loc, tree fndecl,
11958 tree dest, tree src, tree len, tree size,
11959 tree maxlen, bool ignore,
11960 enum built_in_function fcode)
11962 tree fn;
11964 if (!validate_arg (dest, POINTER_TYPE)
11965 || !validate_arg (src,
11966 (fcode == BUILT_IN_MEMSET_CHK
11967 ? INTEGER_TYPE : POINTER_TYPE))
11968 || !validate_arg (len, INTEGER_TYPE)
11969 || !validate_arg (size, INTEGER_TYPE))
11970 return NULL_TREE;
11972 /* If SRC and DEST are the same (and not volatile), return DEST
11973 (resp. DEST+LEN for __mempcpy_chk). */
11974 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11976 if (fcode != BUILT_IN_MEMPCPY_CHK)
11977 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11978 dest, len);
11979 else
11981 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11982 dest, len);
11983 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11987 if (! host_integerp (size, 1))
11988 return NULL_TREE;
11990 if (! integer_all_onesp (size))
11992 if (! host_integerp (len, 1))
11994 /* If LEN is not constant, try MAXLEN too.
11995 For MAXLEN only allow optimizing into non-_ocs function
11996 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11997 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11999 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12001 /* (void) __mempcpy_chk () can be optimized into
12002 (void) __memcpy_chk (). */
12003 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12004 if (!fn)
12005 return NULL_TREE;
12007 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12009 return NULL_TREE;
12012 else
12013 maxlen = len;
12015 if (tree_int_cst_lt (size, maxlen))
12016 return NULL_TREE;
12019 fn = NULL_TREE;
12020 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12021 mem{cpy,pcpy,move,set} is available. */
12022 switch (fcode)
12024 case BUILT_IN_MEMCPY_CHK:
12025 fn = built_in_decls[BUILT_IN_MEMCPY];
12026 break;
12027 case BUILT_IN_MEMPCPY_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMPCPY];
12029 break;
12030 case BUILT_IN_MEMMOVE_CHK:
12031 fn = built_in_decls[BUILT_IN_MEMMOVE];
12032 break;
12033 case BUILT_IN_MEMSET_CHK:
12034 fn = built_in_decls[BUILT_IN_MEMSET];
12035 break;
12036 default:
12037 break;
12040 if (!fn)
12041 return NULL_TREE;
12043 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12046 /* Fold a call to the __st[rp]cpy_chk builtin.
12047 DEST, SRC, and SIZE are the arguments to the call.
12048 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12049 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12050 strings passed as second argument. */
12052 tree
12053 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12054 tree src, tree size,
12055 tree maxlen, bool ignore,
12056 enum built_in_function fcode)
12058 tree len, fn;
12060 if (!validate_arg (dest, POINTER_TYPE)
12061 || !validate_arg (src, POINTER_TYPE)
12062 || !validate_arg (size, INTEGER_TYPE))
12063 return NULL_TREE;
12065 /* If SRC and DEST are the same (and not volatile), return DEST. */
12066 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12067 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12069 if (! host_integerp (size, 1))
12070 return NULL_TREE;
12072 if (! integer_all_onesp (size))
12074 len = c_strlen (src, 1);
12075 if (! len || ! host_integerp (len, 1))
12077 /* If LEN is not constant, try MAXLEN too.
12078 For MAXLEN only allow optimizing into non-_ocs function
12079 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12080 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12082 if (fcode == BUILT_IN_STPCPY_CHK)
12084 if (! ignore)
12085 return NULL_TREE;
12087 /* If return value of __stpcpy_chk is ignored,
12088 optimize into __strcpy_chk. */
12089 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12090 if (!fn)
12091 return NULL_TREE;
12093 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12096 if (! len || TREE_SIDE_EFFECTS (len))
12097 return NULL_TREE;
12099 /* If c_strlen returned something, but not a constant,
12100 transform __strcpy_chk into __memcpy_chk. */
12101 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12102 if (!fn)
12103 return NULL_TREE;
12105 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12106 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12107 build_call_expr_loc (loc, fn, 4,
12108 dest, src, len, size));
12111 else
12112 maxlen = len;
12114 if (! tree_int_cst_lt (maxlen, size))
12115 return NULL_TREE;
12118 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12119 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12120 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12121 if (!fn)
12122 return NULL_TREE;
12124 return build_call_expr_loc (loc, fn, 2, dest, src);
12127 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12128 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12129 length passed as third argument. */
12131 tree
12132 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12133 tree len, tree size, tree maxlen)
12135 tree fn;
12137 if (!validate_arg (dest, POINTER_TYPE)
12138 || !validate_arg (src, POINTER_TYPE)
12139 || !validate_arg (len, INTEGER_TYPE)
12140 || !validate_arg (size, INTEGER_TYPE))
12141 return NULL_TREE;
12143 if (! host_integerp (size, 1))
12144 return NULL_TREE;
12146 if (! integer_all_onesp (size))
12148 if (! host_integerp (len, 1))
12150 /* If LEN is not constant, try MAXLEN too.
12151 For MAXLEN only allow optimizing into non-_ocs function
12152 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12153 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12154 return NULL_TREE;
12156 else
12157 maxlen = len;
12159 if (tree_int_cst_lt (size, maxlen))
12160 return NULL_TREE;
12163 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12164 fn = built_in_decls[BUILT_IN_STRNCPY];
12165 if (!fn)
12166 return NULL_TREE;
12168 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12171 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12172 are the arguments to the call. */
12174 static tree
12175 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12176 tree src, tree size)
12178 tree fn;
12179 const char *p;
12181 if (!validate_arg (dest, POINTER_TYPE)
12182 || !validate_arg (src, POINTER_TYPE)
12183 || !validate_arg (size, INTEGER_TYPE))
12184 return NULL_TREE;
12186 p = c_getstr (src);
12187 /* If the SRC parameter is "", return DEST. */
12188 if (p && *p == '\0')
12189 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12191 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12192 return NULL_TREE;
12194 /* If __builtin_strcat_chk is used, assume strcat is available. */
12195 fn = built_in_decls[BUILT_IN_STRCAT];
12196 if (!fn)
12197 return NULL_TREE;
12199 return build_call_expr_loc (loc, fn, 2, dest, src);
12202 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12203 LEN, and SIZE. */
12205 static tree
12206 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12207 tree dest, tree src, tree len, tree size)
12209 tree fn;
12210 const char *p;
12212 if (!validate_arg (dest, POINTER_TYPE)
12213 || !validate_arg (src, POINTER_TYPE)
12214 || !validate_arg (size, INTEGER_TYPE)
12215 || !validate_arg (size, INTEGER_TYPE))
12216 return NULL_TREE;
12218 p = c_getstr (src);
12219 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12220 if (p && *p == '\0')
12221 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12222 else if (integer_zerop (len))
12223 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12225 if (! host_integerp (size, 1))
12226 return NULL_TREE;
12228 if (! integer_all_onesp (size))
12230 tree src_len = c_strlen (src, 1);
12231 if (src_len
12232 && host_integerp (src_len, 1)
12233 && host_integerp (len, 1)
12234 && ! tree_int_cst_lt (len, src_len))
12236 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12237 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12238 if (!fn)
12239 return NULL_TREE;
12241 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12243 return NULL_TREE;
12246 /* If __builtin_strncat_chk is used, assume strncat is available. */
12247 fn = built_in_decls[BUILT_IN_STRNCAT];
12248 if (!fn)
12249 return NULL_TREE;
12251 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12254 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12255 a normal call should be emitted rather than expanding the function
12256 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12258 static tree
12259 fold_builtin_sprintf_chk (location_t loc, tree exp,
12260 enum built_in_function fcode)
12262 tree dest, size, len, fn, fmt, flag;
12263 const char *fmt_str;
12264 int nargs = call_expr_nargs (exp);
12266 /* Verify the required arguments in the original call. */
12267 if (nargs < 4)
12268 return NULL_TREE;
12269 dest = CALL_EXPR_ARG (exp, 0);
12270 if (!validate_arg (dest, POINTER_TYPE))
12271 return NULL_TREE;
12272 flag = CALL_EXPR_ARG (exp, 1);
12273 if (!validate_arg (flag, INTEGER_TYPE))
12274 return NULL_TREE;
12275 size = CALL_EXPR_ARG (exp, 2);
12276 if (!validate_arg (size, INTEGER_TYPE))
12277 return NULL_TREE;
12278 fmt = CALL_EXPR_ARG (exp, 3);
12279 if (!validate_arg (fmt, POINTER_TYPE))
12280 return NULL_TREE;
12282 if (! host_integerp (size, 1))
12283 return NULL_TREE;
12285 len = NULL_TREE;
12287 if (!init_target_chars ())
12288 return NULL_TREE;
12290 /* Check whether the format is a literal string constant. */
12291 fmt_str = c_getstr (fmt);
12292 if (fmt_str != NULL)
12294 /* If the format doesn't contain % args or %%, we know the size. */
12295 if (strchr (fmt_str, target_percent) == 0)
12297 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12298 len = build_int_cstu (size_type_node, strlen (fmt_str));
12300 /* If the format is "%s" and first ... argument is a string literal,
12301 we know the size too. */
12302 else if (fcode == BUILT_IN_SPRINTF_CHK
12303 && strcmp (fmt_str, target_percent_s) == 0)
12305 tree arg;
12307 if (nargs == 5)
12309 arg = CALL_EXPR_ARG (exp, 4);
12310 if (validate_arg (arg, POINTER_TYPE))
12312 len = c_strlen (arg, 1);
12313 if (! len || ! host_integerp (len, 1))
12314 len = NULL_TREE;
12320 if (! integer_all_onesp (size))
12322 if (! len || ! tree_int_cst_lt (len, size))
12323 return NULL_TREE;
12326 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12327 or if format doesn't contain % chars or is "%s". */
12328 if (! integer_zerop (flag))
12330 if (fmt_str == NULL)
12331 return NULL_TREE;
12332 if (strchr (fmt_str, target_percent) != NULL
12333 && strcmp (fmt_str, target_percent_s))
12334 return NULL_TREE;
12337 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12338 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12339 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12340 if (!fn)
12341 return NULL_TREE;
12343 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12346 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12347 a normal call should be emitted rather than expanding the function
12348 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12349 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12350 passed as second argument. */
12352 tree
12353 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12354 enum built_in_function fcode)
12356 tree dest, size, len, fn, fmt, flag;
12357 const char *fmt_str;
12359 /* Verify the required arguments in the original call. */
12360 if (call_expr_nargs (exp) < 5)
12361 return NULL_TREE;
12362 dest = CALL_EXPR_ARG (exp, 0);
12363 if (!validate_arg (dest, POINTER_TYPE))
12364 return NULL_TREE;
12365 len = CALL_EXPR_ARG (exp, 1);
12366 if (!validate_arg (len, INTEGER_TYPE))
12367 return NULL_TREE;
12368 flag = CALL_EXPR_ARG (exp, 2);
12369 if (!validate_arg (flag, INTEGER_TYPE))
12370 return NULL_TREE;
12371 size = CALL_EXPR_ARG (exp, 3);
12372 if (!validate_arg (size, INTEGER_TYPE))
12373 return NULL_TREE;
12374 fmt = CALL_EXPR_ARG (exp, 4);
12375 if (!validate_arg (fmt, POINTER_TYPE))
12376 return NULL_TREE;
12378 if (! host_integerp (size, 1))
12379 return NULL_TREE;
12381 if (! integer_all_onesp (size))
12383 if (! host_integerp (len, 1))
12385 /* If LEN is not constant, try MAXLEN too.
12386 For MAXLEN only allow optimizing into non-_ocs function
12387 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12388 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12389 return NULL_TREE;
12391 else
12392 maxlen = len;
12394 if (tree_int_cst_lt (size, maxlen))
12395 return NULL_TREE;
12398 if (!init_target_chars ())
12399 return NULL_TREE;
12401 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12402 or if format doesn't contain % chars or is "%s". */
12403 if (! integer_zerop (flag))
12405 fmt_str = c_getstr (fmt);
12406 if (fmt_str == NULL)
12407 return NULL_TREE;
12408 if (strchr (fmt_str, target_percent) != NULL
12409 && strcmp (fmt_str, target_percent_s))
12410 return NULL_TREE;
12413 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12414 available. */
12415 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12416 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12417 if (!fn)
12418 return NULL_TREE;
12420 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12423 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12424 FMT and ARG are the arguments to the call; we don't fold cases with
12425 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12427 Return NULL_TREE if no simplification was possible, otherwise return the
12428 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12429 code of the function to be simplified. */
12431 static tree
12432 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12433 tree arg, bool ignore,
12434 enum built_in_function fcode)
12436 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12437 const char *fmt_str = NULL;
12439 /* If the return value is used, don't do the transformation. */
12440 if (! ignore)
12441 return NULL_TREE;
12443 /* Verify the required arguments in the original call. */
12444 if (!validate_arg (fmt, POINTER_TYPE))
12445 return NULL_TREE;
12447 /* Check whether the format is a literal string constant. */
12448 fmt_str = c_getstr (fmt);
12449 if (fmt_str == NULL)
12450 return NULL_TREE;
12452 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12454 /* If we're using an unlocked function, assume the other
12455 unlocked functions exist explicitly. */
12456 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12457 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12459 else
12461 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12462 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12465 if (!init_target_chars ())
12466 return NULL_TREE;
12468 if (strcmp (fmt_str, target_percent_s) == 0
12469 || strchr (fmt_str, target_percent) == NULL)
12471 const char *str;
12473 if (strcmp (fmt_str, target_percent_s) == 0)
12475 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12476 return NULL_TREE;
12478 if (!arg || !validate_arg (arg, POINTER_TYPE))
12479 return NULL_TREE;
12481 str = c_getstr (arg);
12482 if (str == NULL)
12483 return NULL_TREE;
12485 else
12487 /* The format specifier doesn't contain any '%' characters. */
12488 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12489 && arg)
12490 return NULL_TREE;
12491 str = fmt_str;
12494 /* If the string was "", printf does nothing. */
12495 if (str[0] == '\0')
12496 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12498 /* If the string has length of 1, call putchar. */
12499 if (str[1] == '\0')
12501 /* Given printf("c"), (where c is any one character,)
12502 convert "c"[0] to an int and pass that to the replacement
12503 function. */
12504 newarg = build_int_cst (NULL_TREE, str[0]);
12505 if (fn_putchar)
12506 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12508 else
12510 /* If the string was "string\n", call puts("string"). */
12511 size_t len = strlen (str);
12512 if ((unsigned char)str[len - 1] == target_newline)
12514 /* Create a NUL-terminated string that's one char shorter
12515 than the original, stripping off the trailing '\n'. */
12516 char *newstr = XALLOCAVEC (char, len);
12517 memcpy (newstr, str, len - 1);
12518 newstr[len - 1] = 0;
12520 newarg = build_string_literal (len, newstr);
12521 if (fn_puts)
12522 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12524 else
12525 /* We'd like to arrange to call fputs(string,stdout) here,
12526 but we need stdout and don't have a way to get it yet. */
12527 return NULL_TREE;
12531 /* The other optimizations can be done only on the non-va_list variants. */
12532 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12533 return NULL_TREE;
12535 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12536 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12538 if (!arg || !validate_arg (arg, POINTER_TYPE))
12539 return NULL_TREE;
12540 if (fn_puts)
12541 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12544 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12545 else if (strcmp (fmt_str, target_percent_c) == 0)
12547 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12548 return NULL_TREE;
12549 if (fn_putchar)
12550 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12553 if (!call)
12554 return NULL_TREE;
12556 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12559 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12560 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12561 more than 3 arguments, and ARG may be null in the 2-argument case.
12563 Return NULL_TREE if no simplification was possible, otherwise return the
12564 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12565 code of the function to be simplified. */
12567 static tree
12568 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12569 tree fmt, tree arg, bool ignore,
12570 enum built_in_function fcode)
12572 tree fn_fputc, fn_fputs, call = NULL_TREE;
12573 const char *fmt_str = NULL;
12575 /* If the return value is used, don't do the transformation. */
12576 if (! ignore)
12577 return NULL_TREE;
12579 /* Verify the required arguments in the original call. */
12580 if (!validate_arg (fp, POINTER_TYPE))
12581 return NULL_TREE;
12582 if (!validate_arg (fmt, POINTER_TYPE))
12583 return NULL_TREE;
12585 /* Check whether the format is a literal string constant. */
12586 fmt_str = c_getstr (fmt);
12587 if (fmt_str == NULL)
12588 return NULL_TREE;
12590 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12592 /* If we're using an unlocked function, assume the other
12593 unlocked functions exist explicitly. */
12594 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12595 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12597 else
12599 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12600 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12603 if (!init_target_chars ())
12604 return NULL_TREE;
12606 /* If the format doesn't contain % args or %%, use strcpy. */
12607 if (strchr (fmt_str, target_percent) == NULL)
12609 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12610 && arg)
12611 return NULL_TREE;
12613 /* If the format specifier was "", fprintf does nothing. */
12614 if (fmt_str[0] == '\0')
12616 /* If FP has side-effects, just wait until gimplification is
12617 done. */
12618 if (TREE_SIDE_EFFECTS (fp))
12619 return NULL_TREE;
12621 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12624 /* When "string" doesn't contain %, replace all cases of
12625 fprintf (fp, string) with fputs (string, fp). The fputs
12626 builtin will take care of special cases like length == 1. */
12627 if (fn_fputs)
12628 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12631 /* The other optimizations can be done only on the non-va_list variants. */
12632 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12633 return NULL_TREE;
12635 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12636 else if (strcmp (fmt_str, target_percent_s) == 0)
12638 if (!arg || !validate_arg (arg, POINTER_TYPE))
12639 return NULL_TREE;
12640 if (fn_fputs)
12641 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12644 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12645 else if (strcmp (fmt_str, target_percent_c) == 0)
12647 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12648 return NULL_TREE;
12649 if (fn_fputc)
12650 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12653 if (!call)
12654 return NULL_TREE;
12655 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12658 /* Initialize format string characters in the target charset. */
12660 static bool
12661 init_target_chars (void)
12663 static bool init;
12664 if (!init)
12666 target_newline = lang_hooks.to_target_charset ('\n');
12667 target_percent = lang_hooks.to_target_charset ('%');
12668 target_c = lang_hooks.to_target_charset ('c');
12669 target_s = lang_hooks.to_target_charset ('s');
12670 if (target_newline == 0 || target_percent == 0 || target_c == 0
12671 || target_s == 0)
12672 return false;
12674 target_percent_c[0] = target_percent;
12675 target_percent_c[1] = target_c;
12676 target_percent_c[2] = '\0';
12678 target_percent_s[0] = target_percent;
12679 target_percent_s[1] = target_s;
12680 target_percent_s[2] = '\0';
12682 target_percent_s_newline[0] = target_percent;
12683 target_percent_s_newline[1] = target_s;
12684 target_percent_s_newline[2] = target_newline;
12685 target_percent_s_newline[3] = '\0';
12687 init = true;
12689 return true;
12692 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12693 and no overflow/underflow occurred. INEXACT is true if M was not
12694 exactly calculated. TYPE is the tree type for the result. This
12695 function assumes that you cleared the MPFR flags and then
12696 calculated M to see if anything subsequently set a flag prior to
12697 entering this function. Return NULL_TREE if any checks fail. */
12699 static tree
12700 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12702 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12703 overflow/underflow occurred. If -frounding-math, proceed iff the
12704 result of calling FUNC was exact. */
12705 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12706 && (!flag_rounding_math || !inexact))
12708 REAL_VALUE_TYPE rr;
12710 real_from_mpfr (&rr, m, type, GMP_RNDN);
12711 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12712 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12713 but the mpft_t is not, then we underflowed in the
12714 conversion. */
12715 if (real_isfinite (&rr)
12716 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12718 REAL_VALUE_TYPE rmode;
12720 real_convert (&rmode, TYPE_MODE (type), &rr);
12721 /* Proceed iff the specified mode can hold the value. */
12722 if (real_identical (&rmode, &rr))
12723 return build_real (type, rmode);
12726 return NULL_TREE;
12729 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12730 number and no overflow/underflow occurred. INEXACT is true if M
12731 was not exactly calculated. TYPE is the tree type for the result.
12732 This function assumes that you cleared the MPFR flags and then
12733 calculated M to see if anything subsequently set a flag prior to
12734 entering this function. Return NULL_TREE if any checks fail, if
12735 FORCE_CONVERT is true, then bypass the checks. */
12737 static tree
12738 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12740 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12741 overflow/underflow occurred. If -frounding-math, proceed iff the
12742 result of calling FUNC was exact. */
12743 if (force_convert
12744 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12745 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12746 && (!flag_rounding_math || !inexact)))
12748 REAL_VALUE_TYPE re, im;
12750 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12751 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12752 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12753 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12754 but the mpft_t is not, then we underflowed in the
12755 conversion. */
12756 if (force_convert
12757 || (real_isfinite (&re) && real_isfinite (&im)
12758 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12759 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12761 REAL_VALUE_TYPE re_mode, im_mode;
12763 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12764 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12765 /* Proceed iff the specified mode can hold the value. */
12766 if (force_convert
12767 || (real_identical (&re_mode, &re)
12768 && real_identical (&im_mode, &im)))
12769 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12770 build_real (TREE_TYPE (type), im_mode));
12773 return NULL_TREE;
12776 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12777 FUNC on it and return the resulting value as a tree with type TYPE.
12778 If MIN and/or MAX are not NULL, then the supplied ARG must be
12779 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12780 acceptable values, otherwise they are not. The mpfr precision is
12781 set to the precision of TYPE. We assume that function FUNC returns
12782 zero if the result could be calculated exactly within the requested
12783 precision. */
12785 static tree
12786 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12787 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12788 bool inclusive)
12790 tree result = NULL_TREE;
12792 STRIP_NOPS (arg);
12794 /* To proceed, MPFR must exactly represent the target floating point
12795 format, which only happens when the target base equals two. */
12796 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12797 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12799 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12801 if (real_isfinite (ra)
12802 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12803 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12805 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12806 const int prec = fmt->p;
12807 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12808 int inexact;
12809 mpfr_t m;
12811 mpfr_init2 (m, prec);
12812 mpfr_from_real (m, ra, GMP_RNDN);
12813 mpfr_clear_flags ();
12814 inexact = func (m, m, rnd);
12815 result = do_mpfr_ckconv (m, type, inexact);
12816 mpfr_clear (m);
12820 return result;
12823 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12824 FUNC on it and return the resulting value as a tree with type TYPE.
12825 The mpfr precision is set to the precision of TYPE. We assume that
12826 function FUNC returns zero if the result could be calculated
12827 exactly within the requested precision. */
12829 static tree
12830 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12831 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12833 tree result = NULL_TREE;
12835 STRIP_NOPS (arg1);
12836 STRIP_NOPS (arg2);
12838 /* To proceed, MPFR must exactly represent the target floating point
12839 format, which only happens when the target base equals two. */
12840 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12841 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12842 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12844 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12845 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12847 if (real_isfinite (ra1) && real_isfinite (ra2))
12849 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12850 const int prec = fmt->p;
12851 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12852 int inexact;
12853 mpfr_t m1, m2;
12855 mpfr_inits2 (prec, m1, m2, NULL);
12856 mpfr_from_real (m1, ra1, GMP_RNDN);
12857 mpfr_from_real (m2, ra2, GMP_RNDN);
12858 mpfr_clear_flags ();
12859 inexact = func (m1, m1, m2, rnd);
12860 result = do_mpfr_ckconv (m1, type, inexact);
12861 mpfr_clears (m1, m2, NULL);
12865 return result;
12868 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12869 FUNC on it and return the resulting value as a tree with type TYPE.
12870 The mpfr precision is set to the precision of TYPE. We assume that
12871 function FUNC returns zero if the result could be calculated
12872 exactly within the requested precision. */
12874 static tree
12875 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12876 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12878 tree result = NULL_TREE;
12880 STRIP_NOPS (arg1);
12881 STRIP_NOPS (arg2);
12882 STRIP_NOPS (arg3);
12884 /* To proceed, MPFR must exactly represent the target floating point
12885 format, which only happens when the target base equals two. */
12886 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12887 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12888 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12889 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12891 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12892 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12893 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12895 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12897 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12898 const int prec = fmt->p;
12899 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12900 int inexact;
12901 mpfr_t m1, m2, m3;
12903 mpfr_inits2 (prec, m1, m2, m3, NULL);
12904 mpfr_from_real (m1, ra1, GMP_RNDN);
12905 mpfr_from_real (m2, ra2, GMP_RNDN);
12906 mpfr_from_real (m3, ra3, GMP_RNDN);
12907 mpfr_clear_flags ();
12908 inexact = func (m1, m1, m2, m3, rnd);
12909 result = do_mpfr_ckconv (m1, type, inexact);
12910 mpfr_clears (m1, m2, m3, NULL);
12914 return result;
12917 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12918 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12919 If ARG_SINP and ARG_COSP are NULL then the result is returned
12920 as a complex value.
12921 The type is taken from the type of ARG and is used for setting the
12922 precision of the calculation and results. */
12924 static tree
12925 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12927 tree const type = TREE_TYPE (arg);
12928 tree result = NULL_TREE;
12930 STRIP_NOPS (arg);
12932 /* To proceed, MPFR must exactly represent the target floating point
12933 format, which only happens when the target base equals two. */
12934 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12935 && TREE_CODE (arg) == REAL_CST
12936 && !TREE_OVERFLOW (arg))
12938 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12940 if (real_isfinite (ra))
12942 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12943 const int prec = fmt->p;
12944 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12945 tree result_s, result_c;
12946 int inexact;
12947 mpfr_t m, ms, mc;
12949 mpfr_inits2 (prec, m, ms, mc, NULL);
12950 mpfr_from_real (m, ra, GMP_RNDN);
12951 mpfr_clear_flags ();
12952 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12953 result_s = do_mpfr_ckconv (ms, type, inexact);
12954 result_c = do_mpfr_ckconv (mc, type, inexact);
12955 mpfr_clears (m, ms, mc, NULL);
12956 if (result_s && result_c)
12958 /* If we are to return in a complex value do so. */
12959 if (!arg_sinp && !arg_cosp)
12960 return build_complex (build_complex_type (type),
12961 result_c, result_s);
12963 /* Dereference the sin/cos pointer arguments. */
12964 arg_sinp = build_fold_indirect_ref (arg_sinp);
12965 arg_cosp = build_fold_indirect_ref (arg_cosp);
12966 /* Proceed if valid pointer type were passed in. */
12967 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12968 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12970 /* Set the values. */
12971 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12972 result_s);
12973 TREE_SIDE_EFFECTS (result_s) = 1;
12974 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12975 result_c);
12976 TREE_SIDE_EFFECTS (result_c) = 1;
12977 /* Combine the assignments into a compound expr. */
12978 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12979 result_s, result_c));
12984 return result;
12987 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12988 two-argument mpfr order N Bessel function FUNC on them and return
12989 the resulting value as a tree with type TYPE. The mpfr precision
12990 is set to the precision of TYPE. We assume that function FUNC
12991 returns zero if the result could be calculated exactly within the
12992 requested precision. */
12993 static tree
12994 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12995 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12996 const REAL_VALUE_TYPE *min, bool inclusive)
12998 tree result = NULL_TREE;
13000 STRIP_NOPS (arg1);
13001 STRIP_NOPS (arg2);
13003 /* To proceed, MPFR must exactly represent the target floating point
13004 format, which only happens when the target base equals two. */
13005 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13006 && host_integerp (arg1, 0)
13007 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13009 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13010 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13012 if (n == (long)n
13013 && real_isfinite (ra)
13014 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13016 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13017 const int prec = fmt->p;
13018 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13019 int inexact;
13020 mpfr_t m;
13022 mpfr_init2 (m, prec);
13023 mpfr_from_real (m, ra, GMP_RNDN);
13024 mpfr_clear_flags ();
13025 inexact = func (m, n, m, rnd);
13026 result = do_mpfr_ckconv (m, type, inexact);
13027 mpfr_clear (m);
13031 return result;
13034 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13035 the pointer *(ARG_QUO) and return the result. The type is taken
13036 from the type of ARG0 and is used for setting the precision of the
13037 calculation and results. */
13039 static tree
13040 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13042 tree const type = TREE_TYPE (arg0);
13043 tree result = NULL_TREE;
13045 STRIP_NOPS (arg0);
13046 STRIP_NOPS (arg1);
13048 /* To proceed, MPFR must exactly represent the target floating point
13049 format, which only happens when the target base equals two. */
13050 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13051 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13052 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13054 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13055 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13057 if (real_isfinite (ra0) && real_isfinite (ra1))
13059 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13060 const int prec = fmt->p;
13061 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13062 tree result_rem;
13063 long integer_quo;
13064 mpfr_t m0, m1;
13066 mpfr_inits2 (prec, m0, m1, NULL);
13067 mpfr_from_real (m0, ra0, GMP_RNDN);
13068 mpfr_from_real (m1, ra1, GMP_RNDN);
13069 mpfr_clear_flags ();
13070 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13071 /* Remquo is independent of the rounding mode, so pass
13072 inexact=0 to do_mpfr_ckconv(). */
13073 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13074 mpfr_clears (m0, m1, NULL);
13075 if (result_rem)
13077 /* MPFR calculates quo in the host's long so it may
13078 return more bits in quo than the target int can hold
13079 if sizeof(host long) > sizeof(target int). This can
13080 happen even for native compilers in LP64 mode. In
13081 these cases, modulo the quo value with the largest
13082 number that the target int can hold while leaving one
13083 bit for the sign. */
13084 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13085 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13087 /* Dereference the quo pointer argument. */
13088 arg_quo = build_fold_indirect_ref (arg_quo);
13089 /* Proceed iff a valid pointer type was passed in. */
13090 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13092 /* Set the value. */
13093 tree result_quo = fold_build2 (MODIFY_EXPR,
13094 TREE_TYPE (arg_quo), arg_quo,
13095 build_int_cst (NULL, integer_quo));
13096 TREE_SIDE_EFFECTS (result_quo) = 1;
13097 /* Combine the quo assignment with the rem. */
13098 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13099 result_quo, result_rem));
13104 return result;
13107 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13108 resulting value as a tree with type TYPE. The mpfr precision is
13109 set to the precision of TYPE. We assume that this mpfr function
13110 returns zero if the result could be calculated exactly within the
13111 requested precision. In addition, the integer pointer represented
13112 by ARG_SG will be dereferenced and set to the appropriate signgam
13113 (-1,1) value. */
13115 static tree
13116 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13118 tree result = NULL_TREE;
13120 STRIP_NOPS (arg);
13122 /* To proceed, MPFR must exactly represent the target floating point
13123 format, which only happens when the target base equals two. Also
13124 verify ARG is a constant and that ARG_SG is an int pointer. */
13125 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13126 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13127 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13128 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13130 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13132 /* In addition to NaN and Inf, the argument cannot be zero or a
13133 negative integer. */
13134 if (real_isfinite (ra)
13135 && ra->cl != rvc_zero
13136 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13138 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13139 const int prec = fmt->p;
13140 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13141 int inexact, sg;
13142 mpfr_t m;
13143 tree result_lg;
13145 mpfr_init2 (m, prec);
13146 mpfr_from_real (m, ra, GMP_RNDN);
13147 mpfr_clear_flags ();
13148 inexact = mpfr_lgamma (m, &sg, m, rnd);
13149 result_lg = do_mpfr_ckconv (m, type, inexact);
13150 mpfr_clear (m);
13151 if (result_lg)
13153 tree result_sg;
13155 /* Dereference the arg_sg pointer argument. */
13156 arg_sg = build_fold_indirect_ref (arg_sg);
13157 /* Assign the signgam value into *arg_sg. */
13158 result_sg = fold_build2 (MODIFY_EXPR,
13159 TREE_TYPE (arg_sg), arg_sg,
13160 build_int_cst (NULL, sg));
13161 TREE_SIDE_EFFECTS (result_sg) = 1;
13162 /* Combine the signgam assignment with the lgamma result. */
13163 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13164 result_sg, result_lg));
13169 return result;
13172 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13173 function FUNC on it and return the resulting value as a tree with
13174 type TYPE. The mpfr precision is set to the precision of TYPE. We
13175 assume that function FUNC returns zero if the result could be
13176 calculated exactly within the requested precision. */
13178 static tree
13179 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13181 tree result = NULL_TREE;
13183 STRIP_NOPS (arg);
13185 /* To proceed, MPFR must exactly represent the target floating point
13186 format, which only happens when the target base equals two. */
13187 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13188 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13189 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13191 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13192 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13194 if (real_isfinite (re) && real_isfinite (im))
13196 const struct real_format *const fmt =
13197 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13198 const int prec = fmt->p;
13199 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13200 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13201 int inexact;
13202 mpc_t m;
13204 mpc_init2 (m, prec);
13205 mpfr_from_real (mpc_realref(m), re, rnd);
13206 mpfr_from_real (mpc_imagref(m), im, rnd);
13207 mpfr_clear_flags ();
13208 inexact = func (m, m, crnd);
13209 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13210 mpc_clear (m);
13214 return result;
13217 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13218 mpc function FUNC on it and return the resulting value as a tree
13219 with type TYPE. The mpfr precision is set to the precision of
13220 TYPE. We assume that function FUNC returns zero if the result
13221 could be calculated exactly within the requested precision. If
13222 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13223 in the arguments and/or results. */
13225 tree
13226 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13227 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13229 tree result = NULL_TREE;
13231 STRIP_NOPS (arg0);
13232 STRIP_NOPS (arg1);
13234 /* To proceed, MPFR must exactly represent the target floating point
13235 format, which only happens when the target base equals two. */
13236 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13237 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13238 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13239 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13240 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13242 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13243 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13244 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13245 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13247 if (do_nonfinite
13248 || (real_isfinite (re0) && real_isfinite (im0)
13249 && real_isfinite (re1) && real_isfinite (im1)))
13251 const struct real_format *const fmt =
13252 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13253 const int prec = fmt->p;
13254 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13255 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13256 int inexact;
13257 mpc_t m0, m1;
13259 mpc_init2 (m0, prec);
13260 mpc_init2 (m1, prec);
13261 mpfr_from_real (mpc_realref(m0), re0, rnd);
13262 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13263 mpfr_from_real (mpc_realref(m1), re1, rnd);
13264 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13265 mpfr_clear_flags ();
13266 inexact = func (m0, m0, m1, crnd);
13267 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13268 mpc_clear (m0);
13269 mpc_clear (m1);
13273 return result;
13276 /* FIXME tuples.
13277 The functions below provide an alternate interface for folding
13278 builtin function calls presented as GIMPLE_CALL statements rather
13279 than as CALL_EXPRs. The folded result is still expressed as a
13280 tree. There is too much code duplication in the handling of
13281 varargs functions, and a more intrusive re-factoring would permit
13282 better sharing of code between the tree and statement-based
13283 versions of these functions. */
13285 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13286 along with N new arguments specified as the "..." parameters. SKIP
13287 is the number of arguments in STMT to be omitted. This function is used
13288 to do varargs-to-varargs transformations. */
13290 static tree
13291 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13293 int oldnargs = gimple_call_num_args (stmt);
13294 int nargs = oldnargs - skip + n;
13295 tree fntype = TREE_TYPE (fndecl);
13296 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13297 tree *buffer;
13298 int i, j;
13299 va_list ap;
13300 location_t loc = gimple_location (stmt);
13302 buffer = XALLOCAVEC (tree, nargs);
13303 va_start (ap, n);
13304 for (i = 0; i < n; i++)
13305 buffer[i] = va_arg (ap, tree);
13306 va_end (ap);
13307 for (j = skip; j < oldnargs; j++, i++)
13308 buffer[i] = gimple_call_arg (stmt, j);
13310 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13313 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13314 a normal call should be emitted rather than expanding the function
13315 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13317 static tree
13318 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13320 tree dest, size, len, fn, fmt, flag;
13321 const char *fmt_str;
13322 int nargs = gimple_call_num_args (stmt);
13324 /* Verify the required arguments in the original call. */
13325 if (nargs < 4)
13326 return NULL_TREE;
13327 dest = gimple_call_arg (stmt, 0);
13328 if (!validate_arg (dest, POINTER_TYPE))
13329 return NULL_TREE;
13330 flag = gimple_call_arg (stmt, 1);
13331 if (!validate_arg (flag, INTEGER_TYPE))
13332 return NULL_TREE;
13333 size = gimple_call_arg (stmt, 2);
13334 if (!validate_arg (size, INTEGER_TYPE))
13335 return NULL_TREE;
13336 fmt = gimple_call_arg (stmt, 3);
13337 if (!validate_arg (fmt, POINTER_TYPE))
13338 return NULL_TREE;
13340 if (! host_integerp (size, 1))
13341 return NULL_TREE;
13343 len = NULL_TREE;
13345 if (!init_target_chars ())
13346 return NULL_TREE;
13348 /* Check whether the format is a literal string constant. */
13349 fmt_str = c_getstr (fmt);
13350 if (fmt_str != NULL)
13352 /* If the format doesn't contain % args or %%, we know the size. */
13353 if (strchr (fmt_str, target_percent) == 0)
13355 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13356 len = build_int_cstu (size_type_node, strlen (fmt_str));
13358 /* If the format is "%s" and first ... argument is a string literal,
13359 we know the size too. */
13360 else if (fcode == BUILT_IN_SPRINTF_CHK
13361 && strcmp (fmt_str, target_percent_s) == 0)
13363 tree arg;
13365 if (nargs == 5)
13367 arg = gimple_call_arg (stmt, 4);
13368 if (validate_arg (arg, POINTER_TYPE))
13370 len = c_strlen (arg, 1);
13371 if (! len || ! host_integerp (len, 1))
13372 len = NULL_TREE;
13378 if (! integer_all_onesp (size))
13380 if (! len || ! tree_int_cst_lt (len, size))
13381 return NULL_TREE;
13384 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13385 or if format doesn't contain % chars or is "%s". */
13386 if (! integer_zerop (flag))
13388 if (fmt_str == NULL)
13389 return NULL_TREE;
13390 if (strchr (fmt_str, target_percent) != NULL
13391 && strcmp (fmt_str, target_percent_s))
13392 return NULL_TREE;
13395 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13396 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13397 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13398 if (!fn)
13399 return NULL_TREE;
13401 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13404 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13405 a normal call should be emitted rather than expanding the function
13406 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13407 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13408 passed as second argument. */
13410 tree
13411 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13412 enum built_in_function fcode)
13414 tree dest, size, len, fn, fmt, flag;
13415 const char *fmt_str;
13417 /* Verify the required arguments in the original call. */
13418 if (gimple_call_num_args (stmt) < 5)
13419 return NULL_TREE;
13420 dest = gimple_call_arg (stmt, 0);
13421 if (!validate_arg (dest, POINTER_TYPE))
13422 return NULL_TREE;
13423 len = gimple_call_arg (stmt, 1);
13424 if (!validate_arg (len, INTEGER_TYPE))
13425 return NULL_TREE;
13426 flag = gimple_call_arg (stmt, 2);
13427 if (!validate_arg (flag, INTEGER_TYPE))
13428 return NULL_TREE;
13429 size = gimple_call_arg (stmt, 3);
13430 if (!validate_arg (size, INTEGER_TYPE))
13431 return NULL_TREE;
13432 fmt = gimple_call_arg (stmt, 4);
13433 if (!validate_arg (fmt, POINTER_TYPE))
13434 return NULL_TREE;
13436 if (! host_integerp (size, 1))
13437 return NULL_TREE;
13439 if (! integer_all_onesp (size))
13441 if (! host_integerp (len, 1))
13443 /* If LEN is not constant, try MAXLEN too.
13444 For MAXLEN only allow optimizing into non-_ocs function
13445 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13446 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13447 return NULL_TREE;
13449 else
13450 maxlen = len;
13452 if (tree_int_cst_lt (size, maxlen))
13453 return NULL_TREE;
13456 if (!init_target_chars ())
13457 return NULL_TREE;
13459 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13460 or if format doesn't contain % chars or is "%s". */
13461 if (! integer_zerop (flag))
13463 fmt_str = c_getstr (fmt);
13464 if (fmt_str == NULL)
13465 return NULL_TREE;
13466 if (strchr (fmt_str, target_percent) != NULL
13467 && strcmp (fmt_str, target_percent_s))
13468 return NULL_TREE;
13471 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13472 available. */
13473 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13474 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13475 if (!fn)
13476 return NULL_TREE;
13478 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13481 /* Builtins with folding operations that operate on "..." arguments
13482 need special handling; we need to store the arguments in a convenient
13483 data structure before attempting any folding. Fortunately there are
13484 only a few builtins that fall into this category. FNDECL is the
13485 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13486 result of the function call is ignored. */
13488 static tree
13489 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13490 bool ignore ATTRIBUTE_UNUSED)
13492 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13493 tree ret = NULL_TREE;
13495 switch (fcode)
13497 case BUILT_IN_SPRINTF_CHK:
13498 case BUILT_IN_VSPRINTF_CHK:
13499 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13500 break;
13502 case BUILT_IN_SNPRINTF_CHK:
13503 case BUILT_IN_VSNPRINTF_CHK:
13504 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13506 default:
13507 break;
13509 if (ret)
13511 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13512 TREE_NO_WARNING (ret) = 1;
13513 return ret;
13515 return NULL_TREE;
13518 /* A wrapper function for builtin folding that prevents warnings for
13519 "statement without effect" and the like, caused by removing the
13520 call node earlier than the warning is generated. */
13522 tree
13523 fold_call_stmt (gimple stmt, bool ignore)
13525 tree ret = NULL_TREE;
13526 tree fndecl = gimple_call_fndecl (stmt);
13527 location_t loc = gimple_location (stmt);
13528 if (fndecl
13529 && TREE_CODE (fndecl) == FUNCTION_DECL
13530 && DECL_BUILT_IN (fndecl)
13531 && !gimple_call_va_arg_pack_p (stmt))
13533 int nargs = gimple_call_num_args (stmt);
13535 if (avoid_folding_inline_builtin (fndecl))
13536 return NULL_TREE;
13537 /* FIXME: Don't use a list in this interface. */
13538 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13540 tree arglist = NULL_TREE;
13541 int i;
13542 for (i = nargs - 1; i >= 0; i--)
13543 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13544 return targetm.fold_builtin (fndecl, arglist, ignore);
13546 else
13548 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13550 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13551 int i;
13552 for (i = 0; i < nargs; i++)
13553 args[i] = gimple_call_arg (stmt, i);
13554 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13556 if (!ret)
13557 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13558 if (ret)
13560 /* Propagate location information from original call to
13561 expansion of builtin. Otherwise things like
13562 maybe_emit_chk_warning, that operate on the expansion
13563 of a builtin, will use the wrong location information. */
13564 if (gimple_has_location (stmt))
13566 tree realret = ret;
13567 if (TREE_CODE (ret) == NOP_EXPR)
13568 realret = TREE_OPERAND (ret, 0);
13569 if (CAN_HAVE_LOCATION_P (realret)
13570 && !EXPR_HAS_LOCATION (realret))
13571 SET_EXPR_LOCATION (realret, loc);
13572 return realret;
13574 return ret;
13578 return NULL_TREE;
13581 /* Look up the function in built_in_decls that corresponds to DECL
13582 and set ASMSPEC as its user assembler name. DECL must be a
13583 function decl that declares a builtin. */
13585 void
13586 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13588 tree builtin;
13589 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13590 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13591 && asmspec != 0);
13593 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13594 set_user_assembler_name (builtin, asmspec);
13595 switch (DECL_FUNCTION_CODE (decl))
13597 case BUILT_IN_MEMCPY:
13598 init_block_move_fn (asmspec);
13599 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13600 break;
13601 case BUILT_IN_MEMSET:
13602 init_block_clear_fn (asmspec);
13603 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13604 break;
13605 case BUILT_IN_MEMMOVE:
13606 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13607 break;
13608 case BUILT_IN_MEMCMP:
13609 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13610 break;
13611 case BUILT_IN_ABORT:
13612 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13613 break;
13614 case BUILT_IN_FFS:
13615 if (INT_TYPE_SIZE < BITS_PER_WORD)
13617 set_user_assembler_libfunc ("ffs", asmspec);
13618 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13619 MODE_INT, 0), "ffs");
13621 break;
13622 default:
13623 break;