Merged r158229 through r158464 into branch.
[official-gcc.git] / gcc / builtins.c
blobdbab4847af025f79fb88c0a0deb399ec2aa87175
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with one of the reserved built-in
   prefixes "__builtin_" or "__sync_".  */

bool
is_builtin_name (const char *name)
{
  static const char builtin_prefix[] = "__builtin_";
  static const char sync_prefix[] = "__sync_";

  return (strncmp (name, builtin_prefix, sizeof builtin_prefix - 1) == 0
          || strncmp (name, sync_prefix, sizeof sync_prefix - 1) == 0);
}
241 /* Return true if DECL is a function symbol representing a built-in. */
243 bool
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
254 static bool
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
264 /* Return the alignment in bits of EXP, an object.
265 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
266 guessed alignment e.g. from type alignment. */
269 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
271 unsigned int inner;
273 inner = max_align;
274 if (handled_component_p (exp))
276 HOST_WIDE_INT bitsize, bitpos;
277 tree offset;
278 enum machine_mode mode;
279 int unsignedp, volatilep;
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
283 if (bitpos)
284 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
285 while (offset)
287 tree next_offset;
289 if (TREE_CODE (offset) == PLUS_EXPR)
291 next_offset = TREE_OPERAND (offset, 0);
292 offset = TREE_OPERAND (offset, 1);
294 else
295 next_offset = NULL;
296 if (host_integerp (offset, 1))
298 /* Any overflow in calculating offset_bits won't change
299 the alignment. */
300 unsigned offset_bits
301 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
303 if (offset_bits)
304 inner = MIN (inner, (offset_bits & -offset_bits));
306 else if (TREE_CODE (offset) == MULT_EXPR
307 && host_integerp (TREE_OPERAND (offset, 1), 1))
309 /* Any overflow in calculating offset_factor won't change
310 the alignment. */
311 unsigned offset_factor
312 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
313 * BITS_PER_UNIT);
315 if (offset_factor)
316 inner = MIN (inner, (offset_factor & -offset_factor));
318 else
320 inner = MIN (inner, BITS_PER_UNIT);
321 break;
323 offset = next_offset;
326 if (TREE_CODE (exp) == CONST_DECL)
327 exp = DECL_INITIAL (exp);
328 if (DECL_P (exp)
329 && TREE_CODE (exp) != LABEL_DECL)
330 align = MIN (inner, DECL_ALIGN (exp));
331 #ifdef CONSTANT_ALIGNMENT
332 else if (CONSTANT_CLASS_P (exp))
333 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
334 #endif
335 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
336 || TREE_CODE (exp) == INDIRECT_REF)
337 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
338 else
339 align = MIN (align, inner);
340 return MIN (align, max_align);
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
346 bool
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
353 /* Return the alignment in bits of EXP, a pointer valued expression.
354 But don't return more than MAX_ALIGN no matter what.
355 The alignment returned is, by default, the alignment of the thing that
356 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
358 Otherwise, look at the expression to see if we can do better, i.e., if the
359 expression is actually pointing at an object whose alignment is tighter. */
362 get_pointer_alignment (tree exp, unsigned int max_align)
364 unsigned int align, inner;
366 if (!can_trust_pointer_alignment ())
367 return 0;
369 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
370 return 0;
372 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
373 align = MIN (align, max_align);
375 while (1)
377 switch (TREE_CODE (exp))
379 CASE_CONVERT:
380 exp = TREE_OPERAND (exp, 0);
381 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
382 return align;
384 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
385 align = MIN (inner, max_align);
386 break;
388 case POINTER_PLUS_EXPR:
389 /* If sum of pointer + int, restrict our maximum alignment to that
390 imposed by the integer. If not, we can't do any better than
391 ALIGN. */
392 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
393 return align;
395 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
396 & (max_align / BITS_PER_UNIT - 1))
397 != 0)
398 max_align >>= 1;
400 exp = TREE_OPERAND (exp, 0);
401 break;
403 case ADDR_EXPR:
404 /* See what we are pointing at and look at its alignment. */
405 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
407 default:
408 return align;
413 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
414 way, because it could contain a zero byte in the middle.
415 TREE_STRING_LENGTH is the size of the character array, not the string.
417 ONLY_VALUE should be nonzero if the result is not going to be emitted
418 into the instruction stream and zero if it is going to be expanded.
419 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
420 is returned, otherwise NULL, since
421 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
422 evaluate the side-effects.
424 The value returned is of type `ssizetype'.
426 Unfortunately, string_constant can't access the values of const char
427 arrays with initializers, so neither can we do so here. */
429 tree
430 c_strlen (tree src, int only_value)
432 tree offset_node;
433 HOST_WIDE_INT offset;
434 int max;
435 const char *ptr;
436 location_t loc;
438 STRIP_NOPS (src);
439 if (TREE_CODE (src) == COND_EXPR
440 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
442 tree len1, len2;
444 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
445 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
446 if (tree_int_cst_equal (len1, len2))
447 return len1;
450 if (TREE_CODE (src) == COMPOUND_EXPR
451 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
452 return c_strlen (TREE_OPERAND (src, 1), only_value);
454 if (EXPR_HAS_LOCATION (src))
455 loc = EXPR_LOCATION (src);
456 else
457 loc = input_location;
459 src = string_constant (src, &offset_node);
460 if (src == 0)
461 return NULL_TREE;
463 max = TREE_STRING_LENGTH (src) - 1;
464 ptr = TREE_STRING_POINTER (src);
466 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
468 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
469 compute the offset to the following null if we don't know where to
470 start searching for it. */
471 int i;
473 for (i = 0; i < max; i++)
474 if (ptr[i] == 0)
475 return NULL_TREE;
477 /* We don't know the starting offset, but we do know that the string
478 has no internal zero bytes. We can assume that the offset falls
479 within the bounds of the string; otherwise, the programmer deserves
480 what he gets. Subtract the offset from the length of the string,
481 and return that. This would perhaps not be valid if we were dealing
482 with named arrays in addition to literal string constants. */
484 return size_diffop_loc (loc, size_int (max), offset_node);
487 /* We have a known offset into the string. Start searching there for
488 a null character if we can represent it as a single HOST_WIDE_INT. */
489 if (offset_node == 0)
490 offset = 0;
491 else if (! host_integerp (offset_node, 0))
492 offset = -1;
493 else
494 offset = tree_low_cst (offset_node, 0);
496 /* If the offset is known to be out of bounds, warn, and call strlen at
497 runtime. */
498 if (offset < 0 || offset > max)
500 /* Suppress multiple warnings for propagated constant strings. */
501 if (! TREE_NO_WARNING (src))
503 warning_at (loc, 0, "offset outside bounds of constant string");
504 TREE_NO_WARNING (src) = 1;
506 return NULL_TREE;
509 /* Use strlen to search for the first zero byte. Since any strings
510 constructed with build_string will have nulls appended, we win even
511 if we get handed something like (char[4])"abcd".
513 Since OFFSET is our starting index into the string, no further
514 calculation is needed. */
515 return ssize_int (strlen (ptr + offset));
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
521 static const char *
522 c_getstr (tree src)
524 tree offset_node;
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
542 static rtx
543 c_readstr (const char *str, enum machine_mode mode)
545 HOST_WIDE_INT c[2];
546 HOST_WIDE_INT ch;
547 unsigned int i, j;
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
551 c[0] = 0;
552 c[1] = 0;
553 ch = 1;
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
556 j = i;
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
562 j *= BITS_PER_UNIT;
563 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
565 if (ch)
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
574 P. */
576 static int
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 return 1;
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
589 hostval = val;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
593 if (val != hostval)
594 return 1;
596 *p = hostval;
597 return 0;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
604 static tree
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
612 return save_expr (exp);
615 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
616 times to get the address of either a higher stack frame, or a return
617 address located within it (depending on FNDECL_CODE). */
619 static rtx
620 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
622 int i;
624 #ifdef INITIAL_FRAME_ADDRESS_RTX
625 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
626 #else
627 rtx tem;
629 /* For a zero count with __builtin_return_address, we don't care what
630 frame address we return, because target-specific definitions will
631 override us. Therefore frame pointer elimination is OK, and using
632 the soft frame pointer is OK.
634 For a nonzero count, or a zero count with __builtin_frame_address,
635 we require a stable offset from the current frame pointer to the
636 previous one, so we must use the hard frame pointer, and
637 we must disable frame pointer elimination. */
638 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
639 tem = frame_pointer_rtx;
640 else
642 tem = hard_frame_pointer_rtx;
644 /* Tell reload not to eliminate the frame pointer. */
645 crtl->accesses_prior_frames = 1;
647 #endif
649 /* Some machines need special handling before we can access
650 arbitrary frames. For example, on the SPARC, we must first flush
651 all register windows to the stack. */
652 #ifdef SETUP_FRAME_ADDRESSES
653 if (count > 0)
654 SETUP_FRAME_ADDRESSES ();
655 #endif
657 /* On the SPARC, the return address is not in the frame, it is in a
658 register. There is no way to access it off of the current frame
659 pointer, but it can be accessed off the previous frame pointer by
660 reading the value from the register window save area. */
661 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
662 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
663 count--;
664 #endif
666 /* Scan back COUNT frames to the specified frame. */
667 for (i = 0; i < count; i++)
669 /* Assume the dynamic chain pointer is in the word that the
670 frame address points to, unless otherwise specified. */
671 #ifdef DYNAMIC_CHAIN_ADDRESS
672 tem = DYNAMIC_CHAIN_ADDRESS (tem);
673 #endif
674 tem = memory_address (Pmode, tem);
675 tem = gen_frame_mem (Pmode, tem);
676 tem = copy_to_reg (tem);
679 /* For __builtin_frame_address, return what we've got. But, on
680 the SPARC for example, we may have to add a bias. */
681 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
682 #ifdef FRAME_ADDR_RTX
683 return FRAME_ADDR_RTX (tem);
684 #else
685 return tem;
686 #endif
688 /* For __builtin_return_address, get the return address from that frame. */
689 #ifdef RETURN_ADDR_RTX
690 tem = RETURN_ADDR_RTX (count, tem);
691 #else
692 tem = memory_address (Pmode,
693 plus_constant (tem, GET_MODE_SIZE (Pmode)));
694 tem = gen_frame_mem (Pmode, tem);
695 #endif
696 return tem;
699 /* Alias set used for setjmp buffer. */
700 static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
754 /* Construct the trailing part of a __builtin_setjmp call. This is
755 also called directly by the SJLJ exception handling code. */
757 void
758 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
760 rtx chain;
762 /* Clobber the FP when we get here, so we have to make sure it's
763 marked as used by this function. */
764 emit_use (hard_frame_pointer_rtx);
766 /* Mark the static chain as clobbered here so life information
767 doesn't get messed up for it. */
768 chain = targetm.calls.static_chain (current_function_decl, true);
769 if (chain && REG_P (chain))
770 emit_clobber (chain);
772 /* Now put in the code to restore the frame pointer, and argument
773 pointer, if needed. */
774 #ifdef HAVE_nonlocal_goto
775 if (! HAVE_nonlocal_goto)
776 #endif
778 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
779 /* This might change the hard frame pointer in ways that aren't
780 apparent to early optimization passes, so force a clobber. */
781 emit_clobber (hard_frame_pointer_rtx);
784 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
785 if (fixed_regs[ARG_POINTER_REGNUM])
787 #ifdef ELIMINABLE_REGS
788 size_t i;
789 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
791 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
792 if (elim_regs[i].from == ARG_POINTER_REGNUM
793 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
794 break;
796 if (i == ARRAY_SIZE (elim_regs))
797 #endif
799 /* Now restore our arg pointer from the address at which it
800 was saved in our stack frame. */
801 emit_move_insn (crtl->args.internal_arg_pointer,
802 copy_to_reg (get_arg_pointer_save_area ()));
805 #endif
807 #ifdef HAVE_builtin_setjmp_receiver
808 if (HAVE_builtin_setjmp_receiver)
809 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
810 else
811 #endif
812 #ifdef HAVE_nonlocal_goto_receiver
813 if (HAVE_nonlocal_goto_receiver)
814 emit_insn (gen_nonlocal_goto_receiver ());
815 else
816 #endif
817 { /* Nothing */ }
819 /* We must not allow the code we just generated to be reordered by
820 scheduling. Specifically, the update of the frame pointer must
821 happen immediately, not later. */
822 emit_insn (gen_blockage ());
825 /* __builtin_longjmp is passed a pointer to an array of five words (not
826 all will be used on all machines). It operates similarly to the C
827 library function of the same name, but is more efficient. Much of
828 the code below is copied from the handling of non-local gotos. */
830 static void
831 expand_builtin_longjmp (rtx buf_addr, rtx value)
833 rtx fp, lab, stack, insn, last;
834 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
836 /* DRAP is needed for stack realign if longjmp is expanded to current
837 function */
838 if (SUPPORTS_STACK_ALIGNMENT)
839 crtl->need_drap = true;
841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
844 buf_addr = convert_memory_address (Pmode, buf_addr);
846 buf_addr = force_reg (Pmode, buf_addr);
848 /* We require that the user must pass a second argument of 1, because
849 that is what builtin_setjmp will return. */
850 gcc_assert (value == const1_rtx);
852 last = get_last_insn ();
853 #ifdef HAVE_builtin_longjmp
854 if (HAVE_builtin_longjmp)
855 emit_insn (gen_builtin_longjmp (buf_addr));
856 else
857 #endif
859 fp = gen_rtx_MEM (Pmode, buf_addr);
860 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
861 GET_MODE_SIZE (Pmode)));
863 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
864 2 * GET_MODE_SIZE (Pmode)));
865 set_mem_alias_set (fp, setjmp_alias_set);
866 set_mem_alias_set (lab, setjmp_alias_set);
867 set_mem_alias_set (stack, setjmp_alias_set);
869 /* Pick up FP, label, and SP from the block and jump. This code is
870 from expand_goto in stmt.c; see there for detailed comments. */
871 #ifdef HAVE_nonlocal_goto
872 if (HAVE_nonlocal_goto)
873 /* We have to pass a value to the nonlocal_goto pattern that will
874 get copied into the static_chain pointer, but it does not matter
875 what that value is, because builtin_setjmp does not use it. */
876 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
877 else
878 #endif
880 lab = copy_to_reg (lab);
882 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
883 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
885 emit_move_insn (hard_frame_pointer_rtx, fp);
886 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
888 emit_use (hard_frame_pointer_rtx);
889 emit_use (stack_pointer_rtx);
890 emit_indirect_jump (lab);
894 /* Search backwards and mark the jump insn as a non-local goto.
895 Note that this precludes the use of __builtin_longjmp to a
896 __builtin_setjmp target in the same function. However, we've
897 already cautioned the user that these functions are for
898 internal exception handling use only. */
899 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
901 gcc_assert (insn != last);
903 if (JUMP_P (insn))
905 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
906 break;
908 else if (CALL_P (insn))
909 break;
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  The save area holds the frame
   pointer in its first word and the nonlocal stack pointer in its
   second word.  Never returns normally; always ends in an indirect
   jump (or a nonlocal_goto pattern).  Returns const0_rtx.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Word 0 of the save area is the saved frame pointer, word 1 the
     saved (nonlocal) stack pointer.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      /* Tell the optimizers that everything in memory may change.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

  /* Prefer the mode the target's save_stack_nonlocal pattern wants;
     STACK_SAVEAREA_MODE, if defined, overrides it.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The stack pointer lives in the third word of the setjmp buffer
     (after the frame pointer and the label).  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On error, diagnose and fall back to the default (read).  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      /* Force the address into a form the prefetch pattern accepts.  */
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  The returned MEM has its alias set cleared (string
   builtins may alias anything) and its size cleared (they may access
   several array elements).  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      if (off)
	mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

      /* Allow the string and memory builtins to overflow from one
	 field into another, see http://gcc.gnu.org/PR23561.
	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
	 memory accessed by the string or memory builtin will fit
	 within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
	{
	  tree mem_expr = MEM_EXPR (mem);
	  HOST_WIDE_INT offset = -1, length = -1;
	  tree inner = exp;

	  while (TREE_CODE (inner) == ARRAY_REF
		 || CONVERT_EXPR_P (inner)
		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
		 || TREE_CODE (inner) == SAVE_EXPR)
	    inner = TREE_OPERAND (inner, 0);

	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

	  if (MEM_OFFSET (mem)
	      && CONST_INT_P (MEM_OFFSET (mem)))
	    offset = INTVAL (MEM_OFFSET (mem));

	  if (offset >= 0 && len && host_integerp (len, 0))
	    length = tree_low_cst (len, 0);

	  /* Walk outward through nested COMPONENT_REFs; stop at the
	     innermost field that provably contains the whole access.  */
	  while (TREE_CODE (inner) == COMPONENT_REF)
	    {
	      tree field = TREE_OPERAND (inner, 1);
	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

	      /* Bitfields are generally not byte-addressable.  */
	      gcc_assert (!DECL_BIT_FIELD (field)
			  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			       % BITS_PER_UNIT) == 0
			      && host_integerp (DECL_SIZE (field), 0)
			      && (TREE_INT_CST_LOW (DECL_SIZE (field))
				  % BITS_PER_UNIT) == 0));

	      /* If we can prove that the memory starting at XEXP (mem, 0) and
		 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
		 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
		 fields without DECL_SIZE_UNIT like flexible array members.  */
	      if (length >= 0
		  && DECL_SIZE_UNIT (field)
		  && host_integerp (DECL_SIZE_UNIT (field), 0))
		{
		  HOST_WIDE_INT size
		    = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
		  if (offset <= size
		      && length <= size
		      && offset + length <= size)
		    break;
		}

	      if (offset >= 0
		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
		offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			    / BITS_PER_UNIT;
	      else
		{
		  /* Offset no longer known; give up tracking it.  */
		  offset = -1;
		  length = -1;
		}

	      mem_expr = TREE_OPERAND (mem_expr, 0);
	      inner = TREE_OPERAND (inner, 0);
	    }

	  if (mem_expr == NULL)
	    offset = -1;
	  if (mem_expr != MEM_EXPR (mem))
	    {
	      set_mem_expr (mem, mem_expr);
	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
	    }
	}
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size below.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size below.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  The result is computed once and
   cached in a function-local static.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    /* Keep each slot aligned to the mode's natural alignment.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  Like apply_args_size, the result
   is computed once and cached.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    mode = reg_raw_mode[regno];

	    gcc_assert (mode != VOIDmode);

	    /* Keep each slot aligned to the mode's natural alignment.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  The returned PARALLEL contains one SET per
   value-returning register recorded in apply_result_mode.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When saving, the source is the outbound register; when
	   restoring, the destination is the inbound one.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns the
   address (in a register) of the stack block holding the arg pointer,
   optional structure value address, and all argument registers.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the callee address, ARGUMENTS the block built by
   __builtin_apply_args, and ARGSIZE the number of bytes of arguments
   to copy.  Returns (in ptr_mode) the address of the block holding the
   callee's return registers.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  RESULT is the address of the block of
   saved return registers produced by expand_builtin_apply; reload
   them and return from the current function.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Make sure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to a
   group of case labels that set the local variables fcode, fcodef
   and fcodel of mathfn_built_in_1.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants such as lgamma_r.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
{
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expansion records the double/float/long-double
     variants of FN in fcode/fcodef/fcodel respectively.  */
  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

      default:
	return NULL_TREE;
    }

  /* Select the variant matching TYPE's main variant.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
}
/* Like mathfn_built_in_1(), but always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A NaN compares unequal
     to itself, so jump past the errno store when TARGET == TARGET.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1921 static rtx
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1925 rtx op0, insns;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1929 tree arg;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1932 return NULL_RTX;
1934 arg = CALL_EXPR_ARG (exp, 0);
1936 switch (DECL_FUNCTION_CODE (fndecl))
1938 CASE_FLT_FN (BUILT_IN_SQRT):
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1941 break;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1980 break;
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1986 default:
1987 gcc_unreachable ();
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1994 errno_set = false;
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2008 start_sequence ();
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2014 if (target != 0)
2016 if (errno_set)
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2021 end_sequence ();
2022 emit_insn (insns);
2023 return target;
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2029 end_sequence ();
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2040 operands. */
2042 static rtx
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2049 tree arg0, arg1;
2050 enum machine_mode mode;
2051 bool errno_set = true;
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
2059 default:
2060 break;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2064 return NULL_RTX;
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2077 return 0;
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 return 0;
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2091 default:
2092 gcc_unreachable ();
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2100 return NULL_RTX;
2102 target = gen_reg_rtx (mode);
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 errno_set = false;
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2114 start_sequence ();
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2124 if (target == 0)
2126 end_sequence ();
2127 return expand_call (exp, target, target == const0_rtx);
2130 if (errno_set)
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2138 return target;
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2146 operands. */
2148 static rtx
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2152 rtx op0, insns;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2155 tree arg;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2158 return NULL_RTX;
2160 arg = CALL_EXPR_ARG (exp, 0);
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2167 default:
2168 gcc_unreachable ();
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2183 default:
2184 gcc_unreachable ();
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more the once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2199 start_sequence ();
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2205 int result;
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2211 break;
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2214 break;
2215 default:
2216 gcc_unreachable ();
2218 gcc_assert (result);
2220 else
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2225 if (target != 0)
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2229 end_sequence ();
2230 emit_insn (insns);
2231 return target;
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2237 end_sequence ();
2240 target = expand_call (exp, target, target == const0_rtx);
2242 return target;
2245 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2272 break;
2273 default:
2274 gcc_unreachable ();
2277 /* There's no easy way to detect the case we need to set EDOM. */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2284 if (builtin_optab)
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2291 isnan, etc).
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2297 static rtx
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2301 rtx op0;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2304 tree arg;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 return NULL_RTX;
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 rtx last = get_last_insn ();
2316 tree orig_arg = arg;
2317 /* Make a suitable register to place result in. */
2318 if (!target
2319 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2320 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2321 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2323 gcc_assert (insn_data[icode].operand[0].predicate
2324 (target, GET_MODE (target)));
2326 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2327 need to expand the argument again. This way, we will not perform
2328 side-effects more the once. */
2329 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2331 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2333 if (mode != GET_MODE (op0))
2334 op0 = convert_to_mode (mode, op0, 0);
2336 /* Compute into TARGET.
2337 Set TARGET to wherever the result comes back. */
2338 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2339 return target;
2340 delete_insns_since (last);
2341 CALL_EXPR_ARG (exp, 0) = orig_arg;
2344 return NULL_RTX;
2347 /* Expand a call to the builtin sincos math function.
2348 Return NULL_RTX if a normal call should be emitted rather than expanding the
2349 function in-line. EXP is the expression that is a call to the builtin
2350 function. */
2352 static rtx
2353 expand_builtin_sincos (tree exp)
2355 rtx op0, op1, op2, target1, target2;
2356 enum machine_mode mode;
2357 tree arg, sinp, cosp;
2358 int result;
2359 location_t loc = EXPR_LOCATION (exp);
2361 if (!validate_arglist (exp, REAL_TYPE,
2362 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2363 return NULL_RTX;
2365 arg = CALL_EXPR_ARG (exp, 0);
2366 sinp = CALL_EXPR_ARG (exp, 1);
2367 cosp = CALL_EXPR_ARG (exp, 2);
2369 /* Make a suitable register to place result in. */
2370 mode = TYPE_MODE (TREE_TYPE (arg));
2372 /* Check if sincos insn is available, otherwise emit the call. */
2373 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2374 return NULL_RTX;
2376 target1 = gen_reg_rtx (mode);
2377 target2 = gen_reg_rtx (mode);
2379 op0 = expand_normal (arg);
2380 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2381 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2383 /* Compute into target1 and target2.
2384 Set TARGET to wherever the result comes back. */
2385 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2386 gcc_assert (result);
2388 /* Move target1 and target2 to the memory locations indicated
2389 by op1 and op2. */
2390 emit_move_insn (op1, target1);
2391 emit_move_insn (op2, target2);
2393 return const0_rtx;
2396 /* Expand a call to the internal cexpi builtin to the sincos math function.
2397 EXP is the expression that is a call to the builtin function; if convenient,
2398 the result should be placed in TARGET. SUBTARGET may be used as the target
2399 for computing one of EXP's operands. */
2401 static rtx
2402 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2404 tree fndecl = get_callee_fndecl (exp);
2405 tree arg, type;
2406 enum machine_mode mode;
2407 rtx op0, op1, op2;
2408 location_t loc = EXPR_LOCATION (exp);
2410 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2411 return NULL_RTX;
2413 arg = CALL_EXPR_ARG (exp, 0);
2414 type = TREE_TYPE (arg);
2415 mode = TYPE_MODE (TREE_TYPE (arg));
2417 /* Try expanding via a sincos optab, fall back to emitting a libcall
2418 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2419 is only generated from sincos, cexp or if we have either of them. */
2420 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2422 op1 = gen_reg_rtx (mode);
2423 op2 = gen_reg_rtx (mode);
2425 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2427 /* Compute into op1 and op2. */
2428 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2430 else if (TARGET_HAS_SINCOS)
2432 tree call, fn = NULL_TREE;
2433 tree top1, top2;
2434 rtx op1a, op2a;
2436 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2437 fn = built_in_decls[BUILT_IN_SINCOSF];
2438 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2439 fn = built_in_decls[BUILT_IN_SINCOS];
2440 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2441 fn = built_in_decls[BUILT_IN_SINCOSL];
2442 else
2443 gcc_unreachable ();
2445 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2446 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2447 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2448 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2449 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2450 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2452 /* Make sure not to fold the sincos call again. */
2453 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2454 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2455 call, 3, arg, top1, top2));
2457 else
2459 tree call, fn = NULL_TREE, narg;
2460 tree ctype = build_complex_type (type);
2462 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2463 fn = built_in_decls[BUILT_IN_CEXPF];
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2465 fn = built_in_decls[BUILT_IN_CEXP];
2466 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2467 fn = built_in_decls[BUILT_IN_CEXPL];
2468 else
2469 gcc_unreachable ();
2471 /* If we don't have a decl for cexp create one. This is the
2472 friendliest fallback if the user calls __builtin_cexpi
2473 without full target C99 function support. */
2474 if (fn == NULL_TREE)
2476 tree fntype;
2477 const char *name = NULL;
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2480 name = "cexpf";
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2482 name = "cexp";
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2484 name = "cexpl";
2486 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2487 fn = build_fn_decl (name, fntype);
2490 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2491 build_real (type, dconst0), arg);
2493 /* Make sure not to fold the cexp call again. */
2494 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2495 return expand_expr (build_call_nary (ctype, call, 1, narg),
2496 target, VOIDmode, EXPAND_NORMAL);
2499 /* Now build the proper return type. */
2500 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2501 make_tree (TREE_TYPE (arg), op2),
2502 make_tree (TREE_TYPE (arg), op1)),
2503 target, VOIDmode, EXPAND_NORMAL);
2506 /* Conveniently construct a function call expression. FNDECL names the
2507 function to be called, N is the number of arguments, and the "..."
2508 parameters are the argument expressions. Unlike build_call_exr
2509 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2511 static tree
2512 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2514 va_list ap;
2515 tree fntype = TREE_TYPE (fndecl);
2516 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2518 va_start (ap, n);
2519 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2520 va_end (ap);
2521 SET_EXPR_LOCATION (fn, loc);
2522 return fn;
2524 #define build_call_nofold(...) \
2525 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2527 /* Expand a call to one of the builtin rounding functions gcc defines
2528 as an extension (lfloor and lceil). As these are gcc extensions we
2529 do not need to worry about setting errno to EDOM.
2530 If expanding via optab fails, lower expression to (int)(floor(x)).
2531 EXP is the expression that is a call to the builtin function;
2532 if convenient, the result should be placed in TARGET. */
2534 static rtx
2535 expand_builtin_int_roundingfn (tree exp, rtx target)
2537 convert_optab builtin_optab;
2538 rtx op0, insns, tmp;
2539 tree fndecl = get_callee_fndecl (exp);
2540 enum built_in_function fallback_fn;
2541 tree fallback_fndecl;
2542 enum machine_mode mode;
2543 tree arg;
2545 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2546 gcc_unreachable ();
2548 arg = CALL_EXPR_ARG (exp, 0);
2550 switch (DECL_FUNCTION_CODE (fndecl))
2552 CASE_FLT_FN (BUILT_IN_LCEIL):
2553 CASE_FLT_FN (BUILT_IN_LLCEIL):
2554 builtin_optab = lceil_optab;
2555 fallback_fn = BUILT_IN_CEIL;
2556 break;
2558 CASE_FLT_FN (BUILT_IN_LFLOOR):
2559 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2560 builtin_optab = lfloor_optab;
2561 fallback_fn = BUILT_IN_FLOOR;
2562 break;
2564 default:
2565 gcc_unreachable ();
2568 /* Make a suitable register to place result in. */
2569 mode = TYPE_MODE (TREE_TYPE (exp));
2571 target = gen_reg_rtx (mode);
2573 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2574 need to expand the argument again. This way, we will not perform
2575 side-effects more the once. */
2576 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2578 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2580 start_sequence ();
2582 /* Compute into TARGET. */
2583 if (expand_sfix_optab (target, op0, builtin_optab))
2585 /* Output the entire sequence. */
2586 insns = get_insns ();
2587 end_sequence ();
2588 emit_insn (insns);
2589 return target;
2592 /* If we were unable to expand via the builtin, stop the sequence
2593 (without outputting the insns). */
2594 end_sequence ();
2596 /* Fall back to floating point rounding optab. */
2597 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2599 /* For non-C99 targets we may end up without a fallback fndecl here
2600 if the user called __builtin_lfloor directly. In this case emit
2601 a call to the floor/ceil variants nevertheless. This should result
2602 in the best user experience for not full C99 targets. */
2603 if (fallback_fndecl == NULL_TREE)
2605 tree fntype;
2606 const char *name = NULL;
2608 switch (DECL_FUNCTION_CODE (fndecl))
2610 case BUILT_IN_LCEIL:
2611 case BUILT_IN_LLCEIL:
2612 name = "ceil";
2613 break;
2614 case BUILT_IN_LCEILF:
2615 case BUILT_IN_LLCEILF:
2616 name = "ceilf";
2617 break;
2618 case BUILT_IN_LCEILL:
2619 case BUILT_IN_LLCEILL:
2620 name = "ceill";
2621 break;
2622 case BUILT_IN_LFLOOR:
2623 case BUILT_IN_LLFLOOR:
2624 name = "floor";
2625 break;
2626 case BUILT_IN_LFLOORF:
2627 case BUILT_IN_LLFLOORF:
2628 name = "floorf";
2629 break;
2630 case BUILT_IN_LFLOORL:
2631 case BUILT_IN_LLFLOORL:
2632 name = "floorl";
2633 break;
2634 default:
2635 gcc_unreachable ();
2638 fntype = build_function_type_list (TREE_TYPE (arg),
2639 TREE_TYPE (arg), NULL_TREE);
2640 fallback_fndecl = build_fn_decl (name, fntype);
2643 exp = build_call_nofold (fallback_fndecl, 1, arg);
2645 tmp = expand_normal (exp);
2647 /* Truncate the result of floating point optab to integer
2648 via expand_fix (). */
2649 target = gen_reg_rtx (mode);
2650 expand_fix (target, tmp, 0);
2652 return target;
2655 /* Expand a call to one of the builtin math functions doing integer
2656 conversion (lrint).
2657 Return 0 if a normal call should be emitted rather than expanding the
2658 function in-line. EXP is the expression that is a call to the builtin
2659 function; if convenient, the result should be placed in TARGET. */
2661 static rtx
2662 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2664 convert_optab builtin_optab;
2665 rtx op0, insns;
2666 tree fndecl = get_callee_fndecl (exp);
2667 tree arg;
2668 enum machine_mode mode;
2670 /* There's no easy way to detect the case we need to set EDOM. */
2671 if (flag_errno_math)
2672 return NULL_RTX;
2674 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2675 gcc_unreachable ();
2677 arg = CALL_EXPR_ARG (exp, 0);
2679 switch (DECL_FUNCTION_CODE (fndecl))
2681 CASE_FLT_FN (BUILT_IN_LRINT):
2682 CASE_FLT_FN (BUILT_IN_LLRINT):
2683 builtin_optab = lrint_optab; break;
2684 CASE_FLT_FN (BUILT_IN_LROUND):
2685 CASE_FLT_FN (BUILT_IN_LLROUND):
2686 builtin_optab = lround_optab; break;
2687 default:
2688 gcc_unreachable ();
2691 /* Make a suitable register to place result in. */
2692 mode = TYPE_MODE (TREE_TYPE (exp));
2694 target = gen_reg_rtx (mode);
2696 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2697 need to expand the argument again. This way, we will not perform
2698 side-effects more the once. */
2699 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2701 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2703 start_sequence ();
2705 if (expand_sfix_optab (target, op0, builtin_optab))
2707 /* Output the entire sequence. */
2708 insns = get_insns ();
2709 end_sequence ();
2710 emit_insn (insns);
2711 return target;
2714 /* If we were unable to expand via the builtin, stop the sequence
2715 (without outputting the insns) and call to the library function
2716 with the stabilized argument list. */
2717 end_sequence ();
2719 target = expand_call (exp, target, target == const0_rtx);
2721 return target;
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2797 /* Return the number of multiplications required to calculate
2798 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2799 subroutine of powi_cost. CACHE is an array indicating
2800 which exponents have already been calculated. */
2802 static int
2803 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2805 /* If we've already calculated this exponent, then this evaluation
2806 doesn't require any additional multiplications. */
2807 if (cache[n])
2808 return 0;
2810 cache[n] = true;
2811 return powi_lookup_cost (n - powi_table[n], cache)
2812 + powi_lookup_cost (powi_table[n], cache) + 1;
2815 /* Return the number of multiplications required to calculate
2816 powi(x,n) for an arbitrary x, given the exponent N. This
2817 function needs to be kept in sync with expand_powi below. */
2819 static int
2820 powi_cost (HOST_WIDE_INT n)
2822 bool cache[POWI_TABLE_SIZE];
2823 unsigned HOST_WIDE_INT digit;
2824 unsigned HOST_WIDE_INT val;
2825 int result;
2827 if (n == 0)
2828 return 0;
2830 /* Ignore the reciprocal when calculating the cost. */
2831 val = (n < 0) ? -n : n;
2833 /* Initialize the exponent cache. */
2834 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2835 cache[1] = true;
2837 result = 0;
2839 while (val >= POWI_TABLE_SIZE)
2841 if (val & 1)
2843 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2844 result += powi_lookup_cost (digit, cache)
2845 + POWI_WINDOW_SIZE + 1;
2846 val >>= POWI_WINDOW_SIZE;
2848 else
2850 val >>= 1;
2851 result++;
2855 return result + powi_lookup_cost (val, cache);
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.
   Newly computed powers are memoized into CACHE so shared subexpressions
   of the addition chain are emitted only once.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponent: reuse a previously computed power if we have one.  */
      if (cache[n])
	return cache[n];

      /* Record the target BEFORE recursing, so the recursive calls see
	 this entry and the chain decomposition from powi_table is used.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Large odd exponent: split off the low POWI_WINDOW_SIZE bits and
	 multiply the two partial powers together.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Large even exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2900 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2901 floating point operand in mode MODE, and N is the exponent. This
2902 function needs to be kept in sync with powi_cost above. */
2904 static rtx
2905 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2907 rtx cache[POWI_TABLE_SIZE];
2908 rtx result;
2910 if (n == 0)
2911 return CONST1_RTX (mode);
2913 memset (cache, 0, sizeof (cache));
2914 cache[1] = x;
2916 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2918 /* If the original exponent was negative, reciprocate the result. */
2919 if (n < 0)
2920 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2921 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2923 return result;
/* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
   cbrts.  Return NULL_RTX if no simplification can be made or expand the tree
   if we can simplify it.  Only applies when the exponent is a usable REAL_CST
   and -funsafe-math-optimizations is on, since sqrt/cbrt differ from pow in
   rounding and edge-case (NaN, signed zero) behavior.  */
static rtx
expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
			 rtx subtarget)
{
  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && flag_unsafe_math_optimizations)
    {
      enum machine_mode mode = TYPE_MODE (type);
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
      REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
      tree op = NULL_TREE;

      if (sqrtfn)
	{
	  /* Optimize pow (x, 0.5) into sqrt.  */
	  if (REAL_VALUES_EQUAL (c, dconsthalf))
	    op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);

	  else
	    {
	      /* Build the constants 0.25 and 0.75 by exponent
		 manipulation: 1/2**2 and 3/2**2.  */
	      REAL_VALUE_TYPE dconst1_4 = dconst1;
	      REAL_VALUE_TYPE dconst3_4;
	      SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);

	      real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
	      SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);

	      /* Optimize pow (x, 0.25) into sqrt (sqrt (x)).  Assume on most
		 machines that a builtin sqrt instruction is smaller than a
		 call to pow with 0.25, so do this optimization even if
		 -Os.  */
	      if (REAL_VALUES_EQUAL (c, dconst1_4))
		{
		  op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
		  op = build_call_nofold_loc (loc, sqrtfn, 1, op);
		}

	      /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
		 are optimizing for space.  ARG0 must be free of side effects
		 since it is evaluated by both sqrt calls.  */
	      else if (optimize_insn_for_speed_p ()
		       && !TREE_SIDE_EFFECTS (arg0)
		       && REAL_VALUES_EQUAL (c, dconst3_4))
		{
		  tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
		  tree sqrt2 = builtin_save_expr (sqrt1);
		  tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
		  op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
		}
	    }
	}

      /* Check whether we can do cbrt instead of pow (x, 1./3.) and
	 cbrt/sqrts instead of pow (x, 1./6.).  Requires nonnegative x
	 or no NaNs, because cbrt is defined for negative arguments
	 while pow (x, 1./3.) is not.  */
      if (cbrtfn && ! op
	  && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
	{
	  /* First try 1/3.  */
	  REAL_VALUE_TYPE dconst1_3
	    = real_value_truncate (mode, dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconst1_3))
	    op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);

	  /* Now try 1/6: cbrt (sqrt (x)).  */
	  else if (optimize_insn_for_speed_p ())
	    {
	      REAL_VALUE_TYPE dconst1_6 = dconst1_3;
	      SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);

	      if (REAL_VALUES_EQUAL (c, dconst1_6))
		{
		  op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
		  op = build_call_nofold_loc (loc, cbrtfn, 1, op);
		}
	    }
	}

      if (op)
	return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
    }

  return NULL_RTX;
}
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   Tries, in order: an integer exponent (multiplication chain), a
   half-integer exponent (sqrt (x) * x**(n/2)), a pure sqrt/cbrt chain,
   and a third-of-an-integer exponent; otherwise falls back to the
   optab/libcall path.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Non-constant exponent: nothing clever to do, use the generic
     two-operand math-function expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  The round-trip through real_to_integer /
     real_from_integer / real_identical checks that the exponent really is
     an exact integer.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 may be evaluated more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      /* Even the c == 0.5 case cannot be done unconditionally
		 when we need to preserve signed zeros, as
		 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
	      || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
	      /* For c == 1.5 we can assume that x * sqrt (x) is always
		 smaller than pow (x, 1.5) if sqrt will not be expanded
		 as a call.  */
	      || (n == 3
		  && (optab_handler (sqrt_optab, mode)->insn_code
		      != CODE_FOR_nothing))))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Check whether we can do a series of sqrt or cbrt's instead of the pow
     call.  */
  op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
				subtarget);
  if (op)
    return op;

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* Compute n = round (c * 3) and verify c == n / 3 exactly in MODE.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   A constant exponent with a cheap multiplication chain is expanded
   inline; otherwise a libgcc __powi* libcall is emitted.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH test ensures the exponent fits in a
	 HOST_WIDE_INT (high word is a pure sign extension).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Convert both operands into the modes the libcall expects.  */
  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  Uses the target's
   strlen insn pattern when one exists for TARGET_MODE (or a wider
   integer mode).  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode; try
	 successively wider integer modes until a pattern is found.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the search character (NUL);
	 force it into a register if the predicate rejects const0_rtx.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3338 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3339 bytes from constant string DATA + OFFSET and return it as target
3340 constant. */
3342 static rtx
3343 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3344 enum machine_mode mode)
3346 const char *str = (const char *) data;
3348 gcc_assert (offset >= 0
3349 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3350 <= strlen (str) + 1));
3352 return c_readstr (str + offset, mode);
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up value-profiling hints about typical block size and
	 alignment, if we are expanding a profiled gimple statement.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				         CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns DEST; synthesize it if the block move didn't.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3436 /* Expand a call EXP to the mempcpy builtin.
3437 Return NULL_RTX if we failed; the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). If ENDP is 0 return the
3440 destination pointer, if ENDP is 1 return the end pointer ala
3441 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3442 stpcpy. */
3444 static rtx
3445 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3450 else
3452 tree dest = CALL_EXPR_ARG (exp, 0);
3453 tree src = CALL_EXPR_ARG (exp, 1);
3454 tree len = CALL_EXPR_ARG (exp, 2);
3455 return expand_builtin_mempcpy_args (dest, src, len,
3456 target, mode, /*endp=*/ 1);
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold (fn, 3, dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  ENDP is forwarded so
	 store_by_pieces returns the right end-pointer variant.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, try an inline piecewise move for a small constant
	 length.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3537 #ifndef HAVE_movstr
3538 # define HAVE_movstr 0
3539 # define CODE_FOR_movstr CODE_FOR_nothing
3540 #endif
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* Caller wants the destination pointer back: latch it before the
	 pattern can clobber anything, and use a scratch reg for the
	 pattern's end-pointer output.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      /* Caller wants an end pointer: write it straight into TARGET when
	 TARGET is a usable register, otherwise into a fresh one.  */
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Adapt the end-pointer operand to the mode the pattern declares.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3604 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3605 NULL_RTX if we failed the caller should emit a normal call, otherwise
3606 try to get the result in TARGET, if convenient (and in mode MODE if that's
3607 convenient). */
3609 static rtx
3610 expand_builtin_strcpy (tree exp, rtx target)
3612 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3614 tree dest = CALL_EXPR_ARG (exp, 0);
3615 tree src = CALL_EXPR_ARG (exp, 1);
3616 return expand_builtin_strcpy_args (dest, src, target);
3618 return NULL_RTX;
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* Delegate to the movstr expander; endp == 0 requests the destination
     pointer as the return value (strcpy semantics).  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  stpcpy returns a pointer to the
   terminating NUL of the destination.  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold (fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* Known length: stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1) - 1,
	 which endp == 2 encodes.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Expand as strcpy and compute the end pointer from the
		 returned destination plus the constant length.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Everything else failed; let the movstr pattern have a try.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3709 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3710 bytes from constant string DATA + OFFSET and return it as target
3711 constant. */
3714 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3715 enum machine_mode mode)
3717 const char *str = (const char *) data;
3719 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3720 return const0_rtx;
3722 return c_readstr (str + offset, mode);
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  Only the
   zero-padding case (len > strlen (src)) is expanded inline, via
   store_by_pieces; note the inline expansion returns DEST in TARGET,
   matching strncpy's return value.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes the copy
	 actually takes from SRC.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3776 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3777 bytes from constant string DATA + OFFSET and return it as target
3778 constant. */
3781 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3782 enum machine_mode mode)
3784 const char *c = (const char *) data;
3785 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3787 memset (p, *c, GET_MODE_SIZE (mode));
3789 return c_readstr (p, mode);
3792 /* Callback routine for store_by_pieces. Return the RTL of a register
3793 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3794 char value given in the RTL register data. For example, if mode is
3795 4 bytes wide, return the RTL for 0x01010101*data. */
3797 static rtx
3798 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3799 enum machine_mode mode)
3801 rtx target, coeff;
3802 size_t size;
3803 char *p;
3805 size = GET_MODE_SIZE (mode);
3806 if (size == 1)
3807 return (rtx) data;
3809 p = XALLOCAVEC (char, size);
3810 memset (p, 1, size);
3811 coeff = c_readstr (p, mode);
3813 target = convert_to_mode (mode, (rtx) data, 1);
3814 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3815 return force_reg (mode, target);
3818 /* Expand expression EXP, which is a call to the memset builtin. Return
3819 NULL_RTX if we failed the caller should emit a normal call, otherwise
3820 try to get the result in TARGET, if convenient (and in mode MODE if that's
3821 convenient). */
3823 static rtx
3824 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3826 if (!validate_arglist (exp,
3827 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3828 return NULL_RTX;
3829 else
3831 tree dest = CALL_EXPR_ARG (exp, 0);
3832 tree val = CALL_EXPR_ARG (exp, 1);
3833 tree len = CALL_EXPR_ARG (exp, 2);
3834 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  Falls back to a memset/bzero libcall (via
   do_libcall) when no inline expansion is possible.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up value-profiling hints about typical block size/alignment.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant VAL from here on; reduce it to a host char.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Nonzero constant byte: store by pieces or use the target's
	 setmem pattern.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the block-clear machinery.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Rebuild the original memset/bzero call and expand it normally,
     preserving the tail-call flag.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3967 /* Expand expression EXP, which is a call to the bzero builtin. Return
3968 NULL_RTX if we failed the caller should emit a normal call. */
3970 static rtx
3971 expand_builtin_bzero (tree exp)
3973 tree dest, size;
3974 location_t loc = EXPR_LOCATION (exp);
3976 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3977 return NULL_RTX;
3979 dest = CALL_EXPR_ARG (exp, 0);
3980 size = CALL_EXPR_ARG (exp, 1);
3982 /* New argument list transforming bzero(ptr x, int y) to
3983 memset(ptr x, int 0, size_t y). This is done this way
3984 so that if it isn't expanded inline, we fallback to
3985 calling bzero instead of memset. */
3987 return expand_builtin_memset_args (dest, integer_zero_node,
3988 fold_convert_loc (loc, sizetype, size),
3989 const0_rtx, VOIDmode, exp);
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  Only usable when the target provides a
   cmpmem or cmpstrn insn pattern.  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the dedicated cmpmem pattern; fall back to cmpstrn, which
       has compatible semantics for a length-bounded compare.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse TARGET
       only when it is already a pseudo register of the right mode.  */
    result = target;
    if (! (result != 0
           && REG_P (result) && GET_MODE (result) == insn_mode
           && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
        set_mem_size (arg1_rtx, arg3_rtx);
        set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                           GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                            GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen_* expander may decline (return NULL); then fall back to
       the memcmp library routine.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
                               TYPE_MODE (integer_type_node), 3,
                               XEXP (arg1_rtx, 0), Pmode,
                               XEXP (arg2_rtx, 0), Pmode,
                               convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
                                                TYPE_UNSIGNED (sizetype)),
                               TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
        convert_move (target, result, 0);
        return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  Tries the
   cmpstr insn first, then cmpstrn with a computed length bound, and
   finally expands a library call itself so stabilized arguments are not
   evaluated twice.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
        = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
        = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
        {
          enum machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
                               GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
        {
          tree len;
          rtx arg3_rtx;

          enum machine_mode insn_mode
            = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
          tree len1 = c_strlen (arg1, 1);
          tree len2 = c_strlen (arg2, 1);

          /* +1 to include the terminating NUL in the compared length.  */
          if (len1)
            len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
          if (len2)
            len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

          /* If we don't have a constant length for the first, use the length
             of the second, if we know it.  We don't require a constant for
             this case; some cost analysis could be done if both are available
             but neither is constant.  For now, assume they're equally cheap,
             unless one has side effects.  If both strings have constant lengths,
             use the smaller.  */

          if (!len1)
            len = len2;
          else if (!len2)
            len = len1;
          else if (TREE_SIDE_EFFECTS (len1))
            len = len2;
          else if (TREE_SIDE_EFFECTS (len2))
            len = len1;
          else if (TREE_CODE (len1) != INTEGER_CST)
            len = len2;
          else if (TREE_CODE (len2) != INTEGER_CST)
            len = len1;
          else if (tree_int_cst_lt (len1, len2))
            len = len1;
          else
            len = len2;

          /* If both arguments have side effects, we cannot optimize.  */
          if (!len || TREE_SIDE_EFFECTS (len))
            goto do_libcall;

          arg3_rtx = expand_normal (len);

          /* Make a place to write the result of the instruction.  */
          result = target;
          if (! (result != 0
                 && REG_P (result) && GET_MODE (result) == insn_mode
                 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
            result = gen_reg_rtx (insn_mode);

          insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                                GEN_INT (MIN (arg1_align, arg2_align)));
        }
#endif

      if (insn)
        {
          enum machine_mode mode;
          emit_insn (insn);

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  Requires
   the cmpstrn insn pattern and a side-effect-free strlen bound for at
   least one argument.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
        = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
        = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
        = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* +1 to include the terminating NUL in the compared length.  */
      if (len1)
        len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
        len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant lengths,
         use the smaller.  */

      if (!len1)
        len = len2;
      else if (!len2)
        len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
        len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
        len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
        len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
        len = len1;
      else if (tree_int_cst_lt (len1, len2))
        len = len1;
      else
        len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
        return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                             fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
        return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
             && REG_P (result) && GET_MODE (result) == insn_mode
             && REGNO (result) >= FIRST_PSEUDO_REGISTER))
        result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
                            GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
        {
          emit_insn (insn);

          /* Return the value in the proper mode for this function.  */
          mode = TYPE_MODE (TREE_TYPE (exp));
          if (GET_MODE (result) == mode)
            return result;
          if (target == 0)
            return convert_to_mode (mode, result, 0);
          convert_move (target, result, 0);
          return target;
        }

      /* Expand the library call ourselves using a stabilized argument
         list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The result rtx is cached in saveregs_value for
   the rest of the function, and the register-saving insns are hoisted to
   the function entry.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of words
   is controlled by the definition of CUMULATIVE_ARGS.  Diagnoses a
   missing, non-constant, or out-of-range argument and returns const0_rtx
   in those cases.  */

static rtx
expand_builtin_args_info (tree exp)
{
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  /* View the target's CUMULATIVE_ARGS record for this function as a
     flat array of ints.  */
  int *word_ptr = (int *) &crtl->args.info;

  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
        error ("argument of %<__builtin_args_info%> must be constant");
      else
        {
          HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

          if (wordnum < 0 || wordnum >= nwords)
            error ("argument of %<__builtin_args_info%> out of range");
          else
            return GEN_INT (word_ptr[wordnum]);
        }
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  return const0_rtx;
}
4432 /* Expand a call to __builtin_next_arg. */
4434 static rtx
4435 expand_builtin_next_arg (void)
4437 /* Checking arguments is already done in fold_builtin_next_arg
4438 that must be called before this function. */
4439 return expand_binop (ptr_mode, add_optab,
4440 crtl->args.internal_arg_pointer,
4441 crtl->args.arg_offset_rtx,
4442 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for new trees;
   NEEDS_LVALUE is nonzero when the caller must be able to assign through
   the result.  Returns the stabilized valist tree.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
         So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
        {
          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
        }
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;

          pt = build_pointer_type (vatype);
          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }

      /* Evaluate the address once, then work through a dereference so
         later uses don't re-run side effects.  */
      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  Default hook for
   targets that do not override TARGET_BUILD_BUILTIN_VA_LIST.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
/* The "standard" abi va_list is va_list_type_node.  Default hook for
   TARGET_FN_ABI_VA_LIST; FNDECL is unused here.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  Given TYPE,
   which may be a (possibly decayed or indirected) form of a va_list,
   return va_list_type_node if TYPE matches it, else NULL_TREE.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection: the argument may be a va_list
     lvalue reference or a pointer-to-pointer form.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG the rtx address
   of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
/* Expand EXP, a call to __builtin_va_start.  Always expands to
   const0_rtx; errors are diagnosed, not propagated.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse; nonzero means an error was
     already reported, so emit nothing.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start its own way, if it has a hook.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  VALIST
   is the va_list expression, TYPE the requested argument type; setup
   statements go to PRE_P and POST_P.  Returns a tree that evaluates to
   the fetched argument.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                          gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_build2 (POINTER_PLUS_EXPR,
                               TREE_TYPE (valist),
                               valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
                  fold_convert (TREE_TYPE (valist),
                                fold_build2 (BIT_AND_EXPR, sizetype, t,
                                             size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
                           rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
                       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
                          TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments we fetched a pointer; add the extra
     dereference here.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  The resulting dereference is excluded
   from mudflap instrumentation.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
4696 /* Return a dummy expression of type TYPE in order to keep going after an
4697 error. */
4699 static tree
4700 dummy_object (tree type)
4702 tree t = build_int_cst (build_pointer_type (type), 0);
4703 return build1 (INDIRECT_REF, type, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  *EXPR_P is the
   VA_ARG_EXPR; setup code goes to PRE_P/POST_P.  Returns GS_ERROR on a
   bad va_list, GS_ALL_DONE when the expression was fully replaced, or
   GS_OK after delegating to the target hook.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      /* The follow-up hint is printed at most once per compilation.  */
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Call abort to encourage the user to fix the program.  */
      if (warned)
        inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
         from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
        {
          /* For this case, the backends will be expecting a pointer to
             TREE_TYPE (abi), but it's possible we've
             actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
             So fix it.  */
          if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
            {
              tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
              valist = fold_convert_loc (loc, p1,
                                         build_fold_addr_expr_loc (loc, valist));
            }

          gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
        }
      else
        gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
        /* FIXME: Once most targets are converted we should merely
           assert this is non-null.  */
        return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4799 /* Expand EXP, a call to __builtin_va_end. */
4801 static rtx
4802 expand_builtin_va_end (tree exp)
4804 tree valist = CALL_EXPR_ARG (exp, 0);
4806 /* Evaluate for side effects, if needed. I hate macros that don't
4807 do that. */
4808 if (TREE_SIDE_EFFECTS (valist))
4809 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4811 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  Scalar va_lists are copied
   with a simple assignment; array-type ones with a block move.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination needs to be assignable (lvalue); source does not.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar or record va_list: plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call.  Diagnosed errors yield const0_rtx rather than failing.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        error ("invalid argument to %<__builtin_frame_address%>");
      else
        error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
        = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
                                      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
        {
          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
            warning (0, "unsupported argument to %<__builtin_frame_address%>");
          else
            warning (0, "unsupported argument to %<__builtin_return_address%>");
          return const0_rtx;
        }

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
        return tem;

      /* For __builtin_return_address, copy hard-register results into a
         pseudo so later passes see a stable value.  */
      if (!REG_P (tem)
          && ! CONSTANT_P (tem))
        tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4913 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4914 we failed and the caller should emit a normal call, otherwise try to get
4915 the result in TARGET, if convenient. */
4917 static rtx
4918 expand_builtin_alloca (tree exp, rtx target)
4920 rtx op0;
4921 rtx result;
4923 /* Emit normal call if marked not-inlineable. */
4924 if (CALL_CANNOT_INLINE_P (exp))
4925 return NULL_RTX;
4927 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4928 return NULL_RTX;
4930 /* Compute the argument. */
4931 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4933 /* Allocate the desired space. */
4934 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4935 result = convert_memory_address (ptr_mode, result);
4937 return result;
/* Expand EXP, a call to a bswap builtin.  The operation is performed
   in the mode of EXP's argument; the result goes in TARGET if
   convenient, and SUBTARGET may be used when expanding the operand.
   Return NULL_RTX if a normal call should be emitted instead.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  /* The expansion is expected to succeed here.  */
  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  The operation is
     carried out in the mode of the argument's type.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, 1);
  gcc_assert (target);

  /* Widen or narrow the result to the mode the caller wants.  */
  return convert_to_mode (target_mode, target, 0);
}
4990 /* Expand a call to __builtin_expect. We just return our argument
4991 as the builtin_expect semantic should've been already executed by
4992 tree branch prediction pass. */
4994 static rtx
4995 expand_builtin_expect (tree exp, rtx target)
4997 tree arg;
4999 if (call_expr_nargs (exp) < 2)
5000 return const0_rtx;
5001 arg = CALL_EXPR_ARG (exp, 0);
5003 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5004 /* When guessing was done, the hints should be already stripped away. */
5005 gcc_assert (!flag_guess_branch_prob
5006 || optimize == 0 || errorcount || sorrycount);
5007 return target;
/* Expand a call to __builtin_trap.  Emit the target's trap
   instruction if it has one, otherwise a noreturn call to abort, and
   follow it with a barrier since control does not continue.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* The barrier alone tells the RTL passes that execution cannot
     fall through this point.  */
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Wrap the argument in a SAVE_EXPR (and write it back into the
     call) so it is safe to evaluate more than once.  */
  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* The operand supplying the magnitude.  */
  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  /* The operand supplying the sign.  */
  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
/* Create a new constant string literal and return a char* pointer to it.
   The STRING_CST value is the LEN characters at STR.  */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  /* Build the STRING_CST and give it type "const char[LEN]".  */
  t = build_string (len, str);
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return "&str[0]": the address of the first character, typed as a
     pointer to the (const-qualified) element type.  */
  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}
/* Expand a call to either the entry or exit function profiler.
   EXITP selects the exit libfunc; otherwise the entry libfunc is
   called.  Both receive the current function's address and its
   return address.  */

static rtx
expand_builtin_profile_func (bool exitp)
{
  rtx this_rtx, which;

  /* The address of the current function, taken from its DECL_RTL MEM.  */
  this_rtx = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (this_rtx));
  this_rtx = XEXP (this_rtx, 0);

  if (exitp)
    which = profile_function_exit_libfunc;
  else
    which = profile_function_entry_libfunc;

  emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
		     expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						 0, hard_frame_pointer_rtx),
		     Pmode);

  return const0_rtx;
}
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when
   the default expansion (a call to libgcc's __clear_cache) should be
   used, const0_rtx when the call can be elided or has been expanded
   in-line.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand each bound, convert it to Pmode, and force it into a
	 register if the insn's predicate rejects it.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5179 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5181 static rtx
5182 round_trampoline_addr (rtx tramp)
5184 rtx temp, addend, mask;
5186 /* If we don't need too much alignment, we'll have been guaranteed
5187 proper alignment by get_trampoline_type. */
5188 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5189 return tramp;
5191 /* Round address up to desired boundary. */
5192 temp = gen_reg_rtx (Pmode);
5193 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5194 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5196 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5197 temp, 0, OPTAB_LIB_WIDEN);
5198 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5199 temp, 0, OPTAB_LIB_WIDEN);
5201 return tramp;
/* Expand a call to __builtin_init_trampoline, which writes the
   machine-specific trampoline code into the buffer TRAMP for calling
   nested function FUNC with static chain CHAIN.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If the address had to be rounded up for alignment, rebuild the MEM
     with the aligned address and record the stronger alignment/size.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5252 static rtx
5253 expand_builtin_adjust_trampoline (tree exp)
5255 rtx tramp;
5257 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5258 return NULL_RTX;
5260 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5261 tramp = round_trampoline_addr (tramp);
5262 if (targetm.calls.trampoline_adjust_address)
5263 tramp = targetm.calls.trampoline_adjust_address (tramp);
5265 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      /* If emitting the insn fails, roll back any partially-emitted
	 insns and fall through to the generic bit-twiddling path.  */
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0". */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero. */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* View the whole float as one integer of the same size.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word float: pick out just the word holding the sign bit.  */
      imode = word_mode;
      /* Handle targets with different FP word orders. */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant 1 << bitpos for the mask.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit. */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Declare the __gcov_* wrapper with the same type as the wrapped
     function and redirect the call to it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5473 /* Expand the memory expression LOC and return the appropriate memory operand
5474 for the builtin_sync operations. */
5476 static rtx
5477 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5479 rtx addr, mem;
5481 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5482 addr = convert_memory_address (Pmode, addr);
5484 /* Note that we explicitly do not want any alias information for this
5485 memory, so that we kill all other live memories. Otherwise we don't
5486 satisfy the full barrier semantics of the intrinsic. */
5487 mem = validize_mem (gen_rtx_MEM (mode, addr));
5489 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5490 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5491 MEM_VOLATILE_P (mem) = 1;
5493 return mem;
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; tell the user,
     but only once per form (fetch_and_nand vs. nand_and_fetch).  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If OLD_VAL is promoted to a wider mode, convert it back to MODE.  Take
     care of CONST_INTs, where we know the old_mode only from the call
     argument.  */
  old_mode = GET_MODE (old_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  old_val = convert_modes (mode, old_mode, old_val, 1);

  new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* Likewise for NEW_VAL.  */
  old_mode = GET_MODE (new_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
  new_val = convert_modes (mode, old_mode, new_val, 1);

  if (is_bool)
    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
  else
    return expand_val_compare_and_swap (mem, old_val, new_val, target);
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
				  rtx target)
{
  rtx val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  return expand_sync_lock_test_and_set (mem, val, target);
}
/* Expand the __sync_synchronize intrinsic.  Tries, in order: the
   target's memory_barrier insn, the synchronize libfunc, and finally
   a volatile empty asm clobbering "memory".  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = sync_lock_release[mode];
  if (icode != CODE_FOR_nothing)
    {
      if (!insn_data[icode].operand[1].predicate (val, mode))
	val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
5703 /* Expand an expression EXP that calls a built-in function,
5704 with result going to TARGET if that's convenient
5705 (and in mode MODE if that's convenient).
5706 SUBTARGET may be used as the target for computing one of EXP's operands.
5707 IGNORE is nonzero if the value is to be ignored. */
5710 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5711 int ignore)
5713 tree fndecl = get_callee_fndecl (exp);
5714 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5715 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5717 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5718 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5720 /* When not optimizing, generate calls to library functions for a certain
5721 set of builtins. */
5722 if (!optimize
5723 && !called_as_built_in (fndecl)
5724 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5725 && fcode != BUILT_IN_ALLOCA
5726 && fcode != BUILT_IN_FREE)
5727 return expand_call (exp, target, ignore);
5729 /* The built-in function expanders test for target == const0_rtx
5730 to determine whether the function's result will be ignored. */
5731 if (ignore)
5732 target = const0_rtx;
5734 /* If the result of a pure or const built-in function is ignored, and
5735 none of its arguments are volatile, we can avoid expanding the
5736 built-in call and just evaluate the arguments for side-effects. */
5737 if (target == const0_rtx
5738 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5740 bool volatilep = false;
5741 tree arg;
5742 call_expr_arg_iterator iter;
5744 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5745 if (TREE_THIS_VOLATILE (arg))
5747 volatilep = true;
5748 break;
5751 if (! volatilep)
5753 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5754 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5755 return const0_rtx;
5759 switch (fcode)
5761 CASE_FLT_FN (BUILT_IN_FABS):
5762 target = expand_builtin_fabs (exp, target, subtarget);
5763 if (target)
5764 return target;
5765 break;
5767 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5768 target = expand_builtin_copysign (exp, target, subtarget);
5769 if (target)
5770 return target;
5771 break;
5773 /* Just do a normal library call if we were unable to fold
5774 the values. */
5775 CASE_FLT_FN (BUILT_IN_CABS):
5776 break;
5778 CASE_FLT_FN (BUILT_IN_EXP):
5779 CASE_FLT_FN (BUILT_IN_EXP10):
5780 CASE_FLT_FN (BUILT_IN_POW10):
5781 CASE_FLT_FN (BUILT_IN_EXP2):
5782 CASE_FLT_FN (BUILT_IN_EXPM1):
5783 CASE_FLT_FN (BUILT_IN_LOGB):
5784 CASE_FLT_FN (BUILT_IN_LOG):
5785 CASE_FLT_FN (BUILT_IN_LOG10):
5786 CASE_FLT_FN (BUILT_IN_LOG2):
5787 CASE_FLT_FN (BUILT_IN_LOG1P):
5788 CASE_FLT_FN (BUILT_IN_TAN):
5789 CASE_FLT_FN (BUILT_IN_ASIN):
5790 CASE_FLT_FN (BUILT_IN_ACOS):
5791 CASE_FLT_FN (BUILT_IN_ATAN):
5792 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5793 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5794 because of possible accuracy problems. */
5795 if (! flag_unsafe_math_optimizations)
5796 break;
5797 CASE_FLT_FN (BUILT_IN_SQRT):
5798 CASE_FLT_FN (BUILT_IN_FLOOR):
5799 CASE_FLT_FN (BUILT_IN_CEIL):
5800 CASE_FLT_FN (BUILT_IN_TRUNC):
5801 CASE_FLT_FN (BUILT_IN_ROUND):
5802 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5803 CASE_FLT_FN (BUILT_IN_RINT):
5804 target = expand_builtin_mathfn (exp, target, subtarget);
5805 if (target)
5806 return target;
5807 break;
5809 CASE_FLT_FN (BUILT_IN_ILOGB):
5810 if (! flag_unsafe_math_optimizations)
5811 break;
5812 CASE_FLT_FN (BUILT_IN_ISINF):
5813 CASE_FLT_FN (BUILT_IN_FINITE):
5814 case BUILT_IN_ISFINITE:
5815 case BUILT_IN_ISNORMAL:
5816 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5817 if (target)
5818 return target;
5819 break;
5821 CASE_FLT_FN (BUILT_IN_LCEIL):
5822 CASE_FLT_FN (BUILT_IN_LLCEIL):
5823 CASE_FLT_FN (BUILT_IN_LFLOOR):
5824 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5825 target = expand_builtin_int_roundingfn (exp, target);
5826 if (target)
5827 return target;
5828 break;
5830 CASE_FLT_FN (BUILT_IN_LRINT):
5831 CASE_FLT_FN (BUILT_IN_LLRINT):
5832 CASE_FLT_FN (BUILT_IN_LROUND):
5833 CASE_FLT_FN (BUILT_IN_LLROUND):
5834 target = expand_builtin_int_roundingfn_2 (exp, target);
5835 if (target)
5836 return target;
5837 break;
5839 CASE_FLT_FN (BUILT_IN_POW):
5840 target = expand_builtin_pow (exp, target, subtarget);
5841 if (target)
5842 return target;
5843 break;
5845 CASE_FLT_FN (BUILT_IN_POWI):
5846 target = expand_builtin_powi (exp, target, subtarget);
5847 if (target)
5848 return target;
5849 break;
5851 CASE_FLT_FN (BUILT_IN_ATAN2):
5852 CASE_FLT_FN (BUILT_IN_LDEXP):
5853 CASE_FLT_FN (BUILT_IN_SCALB):
5854 CASE_FLT_FN (BUILT_IN_SCALBN):
5855 CASE_FLT_FN (BUILT_IN_SCALBLN):
5856 if (! flag_unsafe_math_optimizations)
5857 break;
5859 CASE_FLT_FN (BUILT_IN_FMOD):
5860 CASE_FLT_FN (BUILT_IN_REMAINDER):
5861 CASE_FLT_FN (BUILT_IN_DREM):
5862 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5863 if (target)
5864 return target;
5865 break;
5867 CASE_FLT_FN (BUILT_IN_CEXPI):
5868 target = expand_builtin_cexpi (exp, target, subtarget);
5869 gcc_assert (target);
5870 return target;
5872 CASE_FLT_FN (BUILT_IN_SIN):
5873 CASE_FLT_FN (BUILT_IN_COS):
5874 if (! flag_unsafe_math_optimizations)
5875 break;
5876 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5877 if (target)
5878 return target;
5879 break;
5881 CASE_FLT_FN (BUILT_IN_SINCOS):
5882 if (! flag_unsafe_math_optimizations)
5883 break;
5884 target = expand_builtin_sincos (exp);
5885 if (target)
5886 return target;
5887 break;
5889 case BUILT_IN_APPLY_ARGS:
5890 return expand_builtin_apply_args ();
5892 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5893 FUNCTION with a copy of the parameters described by
5894 ARGUMENTS, and ARGSIZE. It returns a block of memory
5895 allocated on the stack into which is stored all the registers
5896 that might possibly be used for returning the result of a
5897 function. ARGUMENTS is the value returned by
5898 __builtin_apply_args. ARGSIZE is the number of bytes of
5899 arguments that must be copied. ??? How should this value be
5900 computed? We'll also need a safe worst case value for varargs
5901 functions. */
5902 case BUILT_IN_APPLY:
5903 if (!validate_arglist (exp, POINTER_TYPE,
5904 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5905 && !validate_arglist (exp, REFERENCE_TYPE,
5906 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5907 return const0_rtx;
5908 else
5910 rtx ops[3];
5912 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5913 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5914 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5916 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5919 /* __builtin_return (RESULT) causes the function to return the
5920 value described by RESULT. RESULT is address of the block of
5921 memory returned by __builtin_apply. */
5922 case BUILT_IN_RETURN:
5923 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5924 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5925 return const0_rtx;
5927 case BUILT_IN_SAVEREGS:
5928 return expand_builtin_saveregs ();
5930 case BUILT_IN_ARGS_INFO:
5931 return expand_builtin_args_info (exp);
5933 case BUILT_IN_VA_ARG_PACK:
5934 /* All valid uses of __builtin_va_arg_pack () are removed during
5935 inlining. */
5936 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5937 return const0_rtx;
5939 case BUILT_IN_VA_ARG_PACK_LEN:
5940 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5941 inlining. */
5942 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5943 return const0_rtx;
5945 /* Return the address of the first anonymous stack arg. */
5946 case BUILT_IN_NEXT_ARG:
5947 if (fold_builtin_next_arg (exp, false))
5948 return const0_rtx;
5949 return expand_builtin_next_arg ();
5951 case BUILT_IN_CLEAR_CACHE:
5952 target = expand_builtin___clear_cache (exp);
5953 if (target)
5954 return target;
5955 break;
5957 case BUILT_IN_CLASSIFY_TYPE:
5958 return expand_builtin_classify_type (exp);
5960 case BUILT_IN_CONSTANT_P:
5961 return const0_rtx;
5963 case BUILT_IN_FRAME_ADDRESS:
5964 case BUILT_IN_RETURN_ADDRESS:
5965 return expand_builtin_frame_address (fndecl, exp);
5967 /* Returns the address of the area where the structure is returned.
5968 0 otherwise. */
5969 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5970 if (call_expr_nargs (exp) != 0
5971 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5972 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5973 return const0_rtx;
5974 else
5975 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5977 case BUILT_IN_ALLOCA:
5978 target = expand_builtin_alloca (exp, target);
5979 if (target)
5980 return target;
5981 break;
5983 case BUILT_IN_STACK_SAVE:
5984 return expand_stack_save ();
5986 case BUILT_IN_STACK_RESTORE:
5987 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5988 return const0_rtx;
5990 case BUILT_IN_BSWAP32:
5991 case BUILT_IN_BSWAP64:
5992 target = expand_builtin_bswap (exp, target, subtarget);
5994 if (target)
5995 return target;
5996 break;
5998 CASE_INT_FN (BUILT_IN_FFS):
5999 case BUILT_IN_FFSIMAX:
6000 target = expand_builtin_unop (target_mode, exp, target,
6001 subtarget, ffs_optab);
6002 if (target)
6003 return target;
6004 break;
6006 CASE_INT_FN (BUILT_IN_CLZ):
6007 case BUILT_IN_CLZIMAX:
6008 target = expand_builtin_unop (target_mode, exp, target,
6009 subtarget, clz_optab);
6010 if (target)
6011 return target;
6012 break;
6014 CASE_INT_FN (BUILT_IN_CTZ):
6015 case BUILT_IN_CTZIMAX:
6016 target = expand_builtin_unop (target_mode, exp, target,
6017 subtarget, ctz_optab);
6018 if (target)
6019 return target;
6020 break;
6022 CASE_INT_FN (BUILT_IN_POPCOUNT):
6023 case BUILT_IN_POPCOUNTIMAX:
6024 target = expand_builtin_unop (target_mode, exp, target,
6025 subtarget, popcount_optab);
6026 if (target)
6027 return target;
6028 break;
6030 CASE_INT_FN (BUILT_IN_PARITY):
6031 case BUILT_IN_PARITYIMAX:
6032 target = expand_builtin_unop (target_mode, exp, target,
6033 subtarget, parity_optab);
6034 if (target)
6035 return target;
6036 break;
6038 case BUILT_IN_STRLEN:
6039 target = expand_builtin_strlen (exp, target, target_mode);
6040 if (target)
6041 return target;
6042 break;
6044 case BUILT_IN_STRCPY:
6045 target = expand_builtin_strcpy (exp, target);
6046 if (target)
6047 return target;
6048 break;
6050 case BUILT_IN_STRNCPY:
6051 target = expand_builtin_strncpy (exp, target);
6052 if (target)
6053 return target;
6054 break;
6056 case BUILT_IN_STPCPY:
6057 target = expand_builtin_stpcpy (exp, target, mode);
6058 if (target)
6059 return target;
6060 break;
6062 case BUILT_IN_MEMCPY:
6063 target = expand_builtin_memcpy (exp, target);
6064 if (target)
6065 return target;
6066 break;
6068 case BUILT_IN_MEMPCPY:
6069 target = expand_builtin_mempcpy (exp, target, mode);
6070 if (target)
6071 return target;
6072 break;
6074 case BUILT_IN_MEMSET:
6075 target = expand_builtin_memset (exp, target, mode);
6076 if (target)
6077 return target;
6078 break;
6080 case BUILT_IN_BZERO:
6081 target = expand_builtin_bzero (exp);
6082 if (target)
6083 return target;
6084 break;
6086 case BUILT_IN_STRCMP:
6087 target = expand_builtin_strcmp (exp, target);
6088 if (target)
6089 return target;
6090 break;
6092 case BUILT_IN_STRNCMP:
6093 target = expand_builtin_strncmp (exp, target, mode);
6094 if (target)
6095 return target;
6096 break;
6098 case BUILT_IN_BCMP:
6099 case BUILT_IN_MEMCMP:
6100 target = expand_builtin_memcmp (exp, target, mode);
6101 if (target)
6102 return target;
6103 break;
6105 case BUILT_IN_SETJMP:
6106 /* This should have been lowered to the builtins below. */
6107 gcc_unreachable ();
6109 case BUILT_IN_SETJMP_SETUP:
6110 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6111 and the receiver label. */
6112 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6114 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6115 VOIDmode, EXPAND_NORMAL);
6116 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6117 rtx label_r = label_rtx (label);
6119 /* This is copied from the handling of non-local gotos. */
6120 expand_builtin_setjmp_setup (buf_addr, label_r);
6121 nonlocal_goto_handler_labels
6122 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6123 nonlocal_goto_handler_labels);
6124 /* ??? Do not let expand_label treat us as such since we would
6125 not want to be both on the list of non-local labels and on
6126 the list of forced labels. */
6127 FORCED_LABEL (label) = 0;
6128 return const0_rtx;
6130 break;
6132 case BUILT_IN_SETJMP_DISPATCHER:
6133 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6134 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6136 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6137 rtx label_r = label_rtx (label);
6139 /* Remove the dispatcher label from the list of non-local labels
6140 since the receiver labels have been added to it above. */
6141 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6142 return const0_rtx;
6144 break;
6146 case BUILT_IN_SETJMP_RECEIVER:
6147 /* __builtin_setjmp_receiver is passed the receiver label. */
6148 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6150 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6151 rtx label_r = label_rtx (label);
6153 expand_builtin_setjmp_receiver (label_r);
6154 return const0_rtx;
6156 break;
6158 /* __builtin_longjmp is passed a pointer to an array of five words.
6159 It's similar to the C library longjmp function but works with
6160 __builtin_setjmp above. */
6161 case BUILT_IN_LONGJMP:
6162 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6164 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6165 VOIDmode, EXPAND_NORMAL);
6166 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6168 if (value != const1_rtx)
6170 error ("%<__builtin_longjmp%> second argument must be 1");
6171 return const0_rtx;
6174 expand_builtin_longjmp (buf_addr, value);
6175 return const0_rtx;
6177 break;
6179 case BUILT_IN_NONLOCAL_GOTO:
6180 target = expand_builtin_nonlocal_goto (exp);
6181 if (target)
6182 return target;
6183 break;
6185 /* This updates the setjmp buffer that is its argument with the value
6186 of the current stack pointer. */
6187 case BUILT_IN_UPDATE_SETJMP_BUF:
6188 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6190 rtx buf_addr
6191 = expand_normal (CALL_EXPR_ARG (exp, 0));
6193 expand_builtin_update_setjmp_buf (buf_addr);
6194 return const0_rtx;
6196 break;
6198 case BUILT_IN_TRAP:
6199 expand_builtin_trap ();
6200 return const0_rtx;
6202 case BUILT_IN_UNREACHABLE:
6203 expand_builtin_unreachable ();
6204 return const0_rtx;
6206 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6207 case BUILT_IN_SIGNBITD32:
6208 case BUILT_IN_SIGNBITD64:
6209 case BUILT_IN_SIGNBITD128:
6210 target = expand_builtin_signbit (exp, target);
6211 if (target)
6212 return target;
6213 break;
6215 /* Various hooks for the DWARF 2 __throw routine. */
6216 case BUILT_IN_UNWIND_INIT:
6217 expand_builtin_unwind_init ();
6218 return const0_rtx;
6219 case BUILT_IN_DWARF_CFA:
6220 return virtual_cfa_rtx;
6221 #ifdef DWARF2_UNWIND_INFO
6222 case BUILT_IN_DWARF_SP_COLUMN:
6223 return expand_builtin_dwarf_sp_column ();
6224 case BUILT_IN_INIT_DWARF_REG_SIZES:
6225 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6226 return const0_rtx;
6227 #endif
6228 case BUILT_IN_FROB_RETURN_ADDR:
6229 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6230 case BUILT_IN_EXTRACT_RETURN_ADDR:
6231 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6232 case BUILT_IN_EH_RETURN:
6233 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6234 CALL_EXPR_ARG (exp, 1));
6235 return const0_rtx;
6236 #ifdef EH_RETURN_DATA_REGNO
6237 case BUILT_IN_EH_RETURN_DATA_REGNO:
6238 return expand_builtin_eh_return_data_regno (exp);
6239 #endif
6240 case BUILT_IN_EXTEND_POINTER:
6241 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6242 case BUILT_IN_EH_POINTER:
6243 return expand_builtin_eh_pointer (exp);
6244 case BUILT_IN_EH_FILTER:
6245 return expand_builtin_eh_filter (exp);
6246 case BUILT_IN_EH_COPY_VALUES:
6247 return expand_builtin_eh_copy_values (exp);
6249 case BUILT_IN_VA_START:
6250 return expand_builtin_va_start (exp);
6251 case BUILT_IN_VA_END:
6252 return expand_builtin_va_end (exp);
6253 case BUILT_IN_VA_COPY:
6254 return expand_builtin_va_copy (exp);
6255 case BUILT_IN_EXPECT:
6256 return expand_builtin_expect (exp, target);
6257 case BUILT_IN_PREFETCH:
6258 expand_builtin_prefetch (exp);
6259 return const0_rtx;
6261 case BUILT_IN_PROFILE_FUNC_ENTER:
6262 return expand_builtin_profile_func (false);
6263 case BUILT_IN_PROFILE_FUNC_EXIT:
6264 return expand_builtin_profile_func (true);
6266 case BUILT_IN_INIT_TRAMPOLINE:
6267 return expand_builtin_init_trampoline (exp);
6268 case BUILT_IN_ADJUST_TRAMPOLINE:
6269 return expand_builtin_adjust_trampoline (exp);
6271 case BUILT_IN_FORK:
6272 case BUILT_IN_EXECL:
6273 case BUILT_IN_EXECV:
6274 case BUILT_IN_EXECLP:
6275 case BUILT_IN_EXECLE:
6276 case BUILT_IN_EXECVP:
6277 case BUILT_IN_EXECVE:
6278 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6279 if (target)
6280 return target;
6281 break;
6283 case BUILT_IN_FETCH_AND_ADD_1:
6284 case BUILT_IN_FETCH_AND_ADD_2:
6285 case BUILT_IN_FETCH_AND_ADD_4:
6286 case BUILT_IN_FETCH_AND_ADD_8:
6287 case BUILT_IN_FETCH_AND_ADD_16:
6288 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6289 target = expand_builtin_sync_operation (mode, exp, PLUS,
6290 false, target, ignore);
6291 if (target)
6292 return target;
6293 break;
6295 case BUILT_IN_FETCH_AND_SUB_1:
6296 case BUILT_IN_FETCH_AND_SUB_2:
6297 case BUILT_IN_FETCH_AND_SUB_4:
6298 case BUILT_IN_FETCH_AND_SUB_8:
6299 case BUILT_IN_FETCH_AND_SUB_16:
6300 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6301 target = expand_builtin_sync_operation (mode, exp, MINUS,
6302 false, target, ignore);
6303 if (target)
6304 return target;
6305 break;
6307 case BUILT_IN_FETCH_AND_OR_1:
6308 case BUILT_IN_FETCH_AND_OR_2:
6309 case BUILT_IN_FETCH_AND_OR_4:
6310 case BUILT_IN_FETCH_AND_OR_8:
6311 case BUILT_IN_FETCH_AND_OR_16:
6312 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6313 target = expand_builtin_sync_operation (mode, exp, IOR,
6314 false, target, ignore);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_FETCH_AND_AND_1:
6320 case BUILT_IN_FETCH_AND_AND_2:
6321 case BUILT_IN_FETCH_AND_AND_4:
6322 case BUILT_IN_FETCH_AND_AND_8:
6323 case BUILT_IN_FETCH_AND_AND_16:
6324 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6325 target = expand_builtin_sync_operation (mode, exp, AND,
6326 false, target, ignore);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_FETCH_AND_XOR_1:
6332 case BUILT_IN_FETCH_AND_XOR_2:
6333 case BUILT_IN_FETCH_AND_XOR_4:
6334 case BUILT_IN_FETCH_AND_XOR_8:
6335 case BUILT_IN_FETCH_AND_XOR_16:
6336 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6337 target = expand_builtin_sync_operation (mode, exp, XOR,
6338 false, target, ignore);
6339 if (target)
6340 return target;
6341 break;
6343 case BUILT_IN_FETCH_AND_NAND_1:
6344 case BUILT_IN_FETCH_AND_NAND_2:
6345 case BUILT_IN_FETCH_AND_NAND_4:
6346 case BUILT_IN_FETCH_AND_NAND_8:
6347 case BUILT_IN_FETCH_AND_NAND_16:
6348 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6349 target = expand_builtin_sync_operation (mode, exp, NOT,
6350 false, target, ignore);
6351 if (target)
6352 return target;
6353 break;
6355 case BUILT_IN_ADD_AND_FETCH_1:
6356 case BUILT_IN_ADD_AND_FETCH_2:
6357 case BUILT_IN_ADD_AND_FETCH_4:
6358 case BUILT_IN_ADD_AND_FETCH_8:
6359 case BUILT_IN_ADD_AND_FETCH_16:
6360 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6361 target = expand_builtin_sync_operation (mode, exp, PLUS,
6362 true, target, ignore);
6363 if (target)
6364 return target;
6365 break;
6367 case BUILT_IN_SUB_AND_FETCH_1:
6368 case BUILT_IN_SUB_AND_FETCH_2:
6369 case BUILT_IN_SUB_AND_FETCH_4:
6370 case BUILT_IN_SUB_AND_FETCH_8:
6371 case BUILT_IN_SUB_AND_FETCH_16:
6372 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6373 target = expand_builtin_sync_operation (mode, exp, MINUS,
6374 true, target, ignore);
6375 if (target)
6376 return target;
6377 break;
6379 case BUILT_IN_OR_AND_FETCH_1:
6380 case BUILT_IN_OR_AND_FETCH_2:
6381 case BUILT_IN_OR_AND_FETCH_4:
6382 case BUILT_IN_OR_AND_FETCH_8:
6383 case BUILT_IN_OR_AND_FETCH_16:
6384 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6385 target = expand_builtin_sync_operation (mode, exp, IOR,
6386 true, target, ignore);
6387 if (target)
6388 return target;
6389 break;
6391 case BUILT_IN_AND_AND_FETCH_1:
6392 case BUILT_IN_AND_AND_FETCH_2:
6393 case BUILT_IN_AND_AND_FETCH_4:
6394 case BUILT_IN_AND_AND_FETCH_8:
6395 case BUILT_IN_AND_AND_FETCH_16:
6396 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6397 target = expand_builtin_sync_operation (mode, exp, AND,
6398 true, target, ignore);
6399 if (target)
6400 return target;
6401 break;
6403 case BUILT_IN_XOR_AND_FETCH_1:
6404 case BUILT_IN_XOR_AND_FETCH_2:
6405 case BUILT_IN_XOR_AND_FETCH_4:
6406 case BUILT_IN_XOR_AND_FETCH_8:
6407 case BUILT_IN_XOR_AND_FETCH_16:
6408 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6409 target = expand_builtin_sync_operation (mode, exp, XOR,
6410 true, target, ignore);
6411 if (target)
6412 return target;
6413 break;
6415 case BUILT_IN_NAND_AND_FETCH_1:
6416 case BUILT_IN_NAND_AND_FETCH_2:
6417 case BUILT_IN_NAND_AND_FETCH_4:
6418 case BUILT_IN_NAND_AND_FETCH_8:
6419 case BUILT_IN_NAND_AND_FETCH_16:
6420 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6421 target = expand_builtin_sync_operation (mode, exp, NOT,
6422 true, target, ignore);
6423 if (target)
6424 return target;
6425 break;
6427 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6428 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6429 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6430 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6431 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6432 if (mode == VOIDmode)
6433 mode = TYPE_MODE (boolean_type_node);
6434 if (!target || !register_operand (target, mode))
6435 target = gen_reg_rtx (mode);
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6438 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6439 if (target)
6440 return target;
6441 break;
6443 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6444 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6445 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6446 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6447 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6449 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6450 if (target)
6451 return target;
6452 break;
6454 case BUILT_IN_LOCK_TEST_AND_SET_1:
6455 case BUILT_IN_LOCK_TEST_AND_SET_2:
6456 case BUILT_IN_LOCK_TEST_AND_SET_4:
6457 case BUILT_IN_LOCK_TEST_AND_SET_8:
6458 case BUILT_IN_LOCK_TEST_AND_SET_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6460 target = expand_builtin_lock_test_and_set (mode, exp, target);
6461 if (target)
6462 return target;
6463 break;
6465 case BUILT_IN_LOCK_RELEASE_1:
6466 case BUILT_IN_LOCK_RELEASE_2:
6467 case BUILT_IN_LOCK_RELEASE_4:
6468 case BUILT_IN_LOCK_RELEASE_8:
6469 case BUILT_IN_LOCK_RELEASE_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6471 expand_builtin_lock_release (mode, exp);
6472 return const0_rtx;
6474 case BUILT_IN_SYNCHRONIZE:
6475 expand_builtin_synchronize ();
6476 return const0_rtx;
6478 case BUILT_IN_OBJECT_SIZE:
6479 return expand_builtin_object_size (exp);
6481 case BUILT_IN_MEMCPY_CHK:
6482 case BUILT_IN_MEMPCPY_CHK:
6483 case BUILT_IN_MEMMOVE_CHK:
6484 case BUILT_IN_MEMSET_CHK:
6485 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6486 if (target)
6487 return target;
6488 break;
6490 case BUILT_IN_STRCPY_CHK:
6491 case BUILT_IN_STPCPY_CHK:
6492 case BUILT_IN_STRNCPY_CHK:
6493 case BUILT_IN_STRCAT_CHK:
6494 case BUILT_IN_STRNCAT_CHK:
6495 case BUILT_IN_SNPRINTF_CHK:
6496 case BUILT_IN_VSNPRINTF_CHK:
6497 maybe_emit_chk_warning (exp, fcode);
6498 break;
6500 case BUILT_IN_SPRINTF_CHK:
6501 case BUILT_IN_VSPRINTF_CHK:
6502 maybe_emit_sprintf_chk_warning (exp, fcode);
6503 break;
6505 case BUILT_IN_FREE:
6506 maybe_emit_free_warning (exp);
6507 break;
6509 default: /* just do library call, if unknown builtin */
6510 break;
6513 /* The switch statement above can drop through to cause the function
6514 to be called normally. */
6515 return expand_call (exp, target, ignore);
6518 /* Determine whether a tree node represents a call to a built-in
6519 function. If the tree T is a call to a built-in function with
6520 the right number of arguments of the appropriate types, return
6521 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6522 Otherwise the return value is END_BUILTINS. */
6524 enum built_in_function
6525 builtin_mathfn_code (const_tree t)
6527 const_tree fndecl, arg, parmlist;
6528 const_tree argtype, parmtype;
6529 const_call_expr_arg_iterator iter;
6531 if (TREE_CODE (t) != CALL_EXPR
6532 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6533 return END_BUILTINS;
6535 fndecl = get_callee_fndecl (t);
6536 if (fndecl == NULL_TREE
6537 || TREE_CODE (fndecl) != FUNCTION_DECL
6538 || ! DECL_BUILT_IN (fndecl)
6539 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6540 return END_BUILTINS;
6542 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6543 init_const_call_expr_arg_iterator (t, &iter);
6544 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6546 /* If a function doesn't take a variable number of arguments,
6547 the last element in the list will have type `void'. */
6548 parmtype = TREE_VALUE (parmlist);
6549 if (VOID_TYPE_P (parmtype))
6551 if (more_const_call_expr_args_p (&iter))
6552 return END_BUILTINS;
6553 return DECL_FUNCTION_CODE (fndecl);
6556 if (! more_const_call_expr_args_p (&iter))
6557 return END_BUILTINS;
6559 arg = next_const_call_expr_arg (&iter);
6560 argtype = TREE_TYPE (arg);
6562 if (SCALAR_FLOAT_TYPE_P (parmtype))
6564 if (! SCALAR_FLOAT_TYPE_P (argtype))
6565 return END_BUILTINS;
6567 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6569 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6570 return END_BUILTINS;
6572 else if (POINTER_TYPE_P (parmtype))
6574 if (! POINTER_TYPE_P (argtype))
6575 return END_BUILTINS;
6577 else if (INTEGRAL_TYPE_P (parmtype))
6579 if (! INTEGRAL_TYPE_P (argtype))
6580 return END_BUILTINS;
6582 else
6583 return END_BUILTINS;
6586 /* Variable-length argument list. */
6587 return DECL_FUNCTION_CODE (fndecl);
6590 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6591 evaluate to a constant. */
6593 static tree
6594 fold_builtin_constant_p (tree arg)
6596 /* We return 1 for a numeric type that's known to be a constant
6597 value at compile-time or for an aggregate type that's a
6598 literal constant. */
6599 STRIP_NOPS (arg);
6601 /* If we know this is a constant, emit the constant of one. */
6602 if (CONSTANT_CLASS_P (arg)
6603 || (TREE_CODE (arg) == CONSTRUCTOR
6604 && TREE_CONSTANT (arg)))
6605 return integer_one_node;
6606 if (TREE_CODE (arg) == ADDR_EXPR)
6608 tree op = TREE_OPERAND (arg, 0);
6609 if (TREE_CODE (op) == STRING_CST
6610 || (TREE_CODE (op) == ARRAY_REF
6611 && integer_zerop (TREE_OPERAND (op, 1))
6612 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6613 return integer_one_node;
6616 /* If this expression has side effects, show we don't know it to be a
6617 constant. Likewise if it's a pointer or aggregate type since in
6618 those case we only want literals, since those are only optimized
6619 when generating RTL, not later.
6620 And finally, if we are compiling an initializer, not code, we
6621 need to return a definite result now; there's not going to be any
6622 more optimization done. */
6623 if (TREE_SIDE_EFFECTS (arg)
6624 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6625 || POINTER_TYPE_P (TREE_TYPE (arg))
6626 || cfun == 0
6627 || folding_initializer)
6628 return integer_zero_node;
6630 return NULL_TREE;
6633 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6634 return it as a truthvalue. */
6636 static tree
6637 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6639 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6641 fn = built_in_decls[BUILT_IN_EXPECT];
6642 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6643 ret_type = TREE_TYPE (TREE_TYPE (fn));
6644 pred_type = TREE_VALUE (arg_types);
6645 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6647 pred = fold_convert_loc (loc, pred_type, pred);
6648 expected = fold_convert_loc (loc, expected_type, expected);
6649 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6651 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6652 build_int_cst (ret_type, 0));
6655 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6656 NULL_TREE if no simplification is possible. */
6658 static tree
6659 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6661 tree inner, fndecl;
6662 enum tree_code code;
6664 /* If this is a builtin_expect within a builtin_expect keep the
6665 inner one. See through a comparison against a constant. It
6666 might have been added to create a thruthvalue. */
6667 inner = arg0;
6668 if (COMPARISON_CLASS_P (inner)
6669 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6670 inner = TREE_OPERAND (inner, 0);
6672 if (TREE_CODE (inner) == CALL_EXPR
6673 && (fndecl = get_callee_fndecl (inner))
6674 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6675 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6676 return arg0;
6678 /* Distribute the expected value over short-circuiting operators.
6679 See through the cast from truthvalue_type_node to long. */
6680 inner = arg0;
6681 while (TREE_CODE (inner) == NOP_EXPR
6682 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6683 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6684 inner = TREE_OPERAND (inner, 0);
6686 code = TREE_CODE (inner);
6687 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6689 tree op0 = TREE_OPERAND (inner, 0);
6690 tree op1 = TREE_OPERAND (inner, 1);
6692 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6693 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6694 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6696 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6699 /* If the argument isn't invariant then there's nothing else we can do. */
6700 if (!TREE_CONSTANT (arg0))
6701 return NULL_TREE;
6703 /* If we expect that a comparison against the argument will fold to
6704 a constant return the constant. In practice, this means a true
6705 constant or the address of a non-weak symbol. */
6706 inner = arg0;
6707 STRIP_NOPS (inner);
6708 if (TREE_CODE (inner) == ADDR_EXPR)
6712 inner = TREE_OPERAND (inner, 0);
6714 while (TREE_CODE (inner) == COMPONENT_REF
6715 || TREE_CODE (inner) == ARRAY_REF);
6716 if ((TREE_CODE (inner) == VAR_DECL
6717 || TREE_CODE (inner) == FUNCTION_DECL)
6718 && DECL_WEAK (inner))
6719 return NULL_TREE;
6722 /* Otherwise, ARG0 already has the proper type for the return value. */
6723 return arg0;
6726 /* Fold a call to __builtin_classify_type with argument ARG. */
6728 static tree
6729 fold_builtin_classify_type (tree arg)
6731 if (arg == 0)
6732 return build_int_cst (NULL_TREE, no_type_class);
6734 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6737 /* Fold a call to __builtin_strlen with argument ARG. */
6739 static tree
6740 fold_builtin_strlen (location_t loc, tree type, tree arg)
6742 if (!validate_arg (arg, POINTER_TYPE))
6743 return NULL_TREE;
6744 else
6746 tree len = c_strlen (arg, 0);
6748 if (len)
6749 return fold_convert_loc (loc, type, len);
6751 return NULL_TREE;
6755 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6757 static tree
6758 fold_builtin_inf (location_t loc, tree type, int warn)
6760 REAL_VALUE_TYPE real;
6762 /* __builtin_inff is intended to be usable to define INFINITY on all
6763 targets. If an infinity is not available, INFINITY expands "to a
6764 positive constant of type float that overflows at translation
6765 time", footnote "In this case, using INFINITY will violate the
6766 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6767 Thus we pedwarn to ensure this constraint violation is
6768 diagnosed. */
6769 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6770 pedwarn (loc, 0, "target format does not support infinity");
6772 real_inf (&real);
6773 return build_real (type, real);
6776 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6778 static tree
6779 fold_builtin_nan (tree arg, tree type, int quiet)
6781 REAL_VALUE_TYPE real;
6782 const char *str;
6784 if (!validate_arg (arg, POINTER_TYPE))
6785 return NULL_TREE;
6786 str = c_getstr (arg);
6787 if (!str)
6788 return NULL_TREE;
6790 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6791 return NULL_TREE;
6793 return build_real (type, real);
6796 /* Return true if the floating point expression T has an integer value.
6797 We also allow +Inf, -Inf and NaN to be considered integer values. */
6799 static bool
6800 integer_valued_real_p (tree t)
6802 switch (TREE_CODE (t))
6804 case FLOAT_EXPR:
6805 return true;
6807 case ABS_EXPR:
6808 case SAVE_EXPR:
6809 return integer_valued_real_p (TREE_OPERAND (t, 0));
6811 case COMPOUND_EXPR:
6812 case MODIFY_EXPR:
6813 case BIND_EXPR:
6814 return integer_valued_real_p (TREE_OPERAND (t, 1));
6816 case PLUS_EXPR:
6817 case MINUS_EXPR:
6818 case MULT_EXPR:
6819 case MIN_EXPR:
6820 case MAX_EXPR:
6821 return integer_valued_real_p (TREE_OPERAND (t, 0))
6822 && integer_valued_real_p (TREE_OPERAND (t, 1));
6824 case COND_EXPR:
6825 return integer_valued_real_p (TREE_OPERAND (t, 1))
6826 && integer_valued_real_p (TREE_OPERAND (t, 2));
6828 case REAL_CST:
6829 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6831 case NOP_EXPR:
6833 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6834 if (TREE_CODE (type) == INTEGER_TYPE)
6835 return true;
6836 if (TREE_CODE (type) == REAL_TYPE)
6837 return integer_valued_real_p (TREE_OPERAND (t, 0));
6838 break;
6841 case CALL_EXPR:
6842 switch (builtin_mathfn_code (t))
6844 CASE_FLT_FN (BUILT_IN_CEIL):
6845 CASE_FLT_FN (BUILT_IN_FLOOR):
6846 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6847 CASE_FLT_FN (BUILT_IN_RINT):
6848 CASE_FLT_FN (BUILT_IN_ROUND):
6849 CASE_FLT_FN (BUILT_IN_TRUNC):
6850 return true;
6852 CASE_FLT_FN (BUILT_IN_FMIN):
6853 CASE_FLT_FN (BUILT_IN_FMAX):
6854 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6855 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6857 default:
6858 break;
6860 break;
6862 default:
6863 break;
6865 return false;
6868 /* FNDECL is assumed to be a builtin where truncation can be propagated
6869 across (for instance floor((double)f) == (double)floorf (f).
6870 Do the transformation for a call with argument ARG. */
6872 static tree
6873 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6875 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6877 if (!validate_arg (arg, REAL_TYPE))
6878 return NULL_TREE;
6880 /* Integer rounding functions are idempotent. */
6881 if (fcode == builtin_mathfn_code (arg))
6882 return arg;
6884 /* If argument is already integer valued, and we don't need to worry
6885 about setting errno, there's no need to perform rounding. */
6886 if (! flag_errno_math && integer_valued_real_p (arg))
6887 return arg;
6889 if (optimize)
6891 tree arg0 = strip_float_extensions (arg);
6892 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6893 tree newtype = TREE_TYPE (arg0);
6894 tree decl;
6896 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6897 && (decl = mathfn_built_in (newtype, fcode)))
6898 return fold_convert_loc (loc, ftype,
6899 build_call_expr_loc (loc, decl, 1,
6900 fold_convert_loc (loc,
6901 newtype,
6902 arg0)));
6904 return NULL_TREE;
6907 /* FNDECL is assumed to be builtin which can narrow the FP type of
6908 the argument, for instance lround((double)f) -> lroundf (f).
6909 Do the transformation for a call with argument ARG. */
6911 static tree
6912 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6914 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6916 if (!validate_arg (arg, REAL_TYPE))
6917 return NULL_TREE;
6919 /* If argument is already integer valued, and we don't need to worry
6920 about setting errno, there's no need to perform rounding. */
6921 if (! flag_errno_math && integer_valued_real_p (arg))
6922 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6923 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6925 if (optimize)
6927 tree ftype = TREE_TYPE (arg);
6928 tree arg0 = strip_float_extensions (arg);
6929 tree newtype = TREE_TYPE (arg0);
6930 tree decl;
6932 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6933 && (decl = mathfn_built_in (newtype, fcode)))
6934 return build_call_expr_loc (loc, decl, 1,
6935 fold_convert_loc (loc, newtype, arg0));
6938 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6939 sizeof (long long) == sizeof (long). */
6940 if (TYPE_PRECISION (long_long_integer_type_node)
6941 == TYPE_PRECISION (long_integer_type_node))
6943 tree newfn = NULL_TREE;
6944 switch (fcode)
6946 CASE_FLT_FN (BUILT_IN_LLCEIL):
6947 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6948 break;
6950 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6951 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6952 break;
6954 CASE_FLT_FN (BUILT_IN_LLROUND):
6955 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6956 break;
6958 CASE_FLT_FN (BUILT_IN_LLRINT):
6959 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6960 break;
6962 default:
6963 break;
6966 if (newfn)
6968 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6969 return fold_convert_loc (loc,
6970 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6974 return NULL_TREE;
6977 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6978 return type. Return NULL_TREE if no simplification can be made. */
6980 static tree
6981 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6983 tree res;
6985 if (!validate_arg (arg, COMPLEX_TYPE)
6986 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6987 return NULL_TREE;
6989 /* Calculate the result when the argument is a constant. */
6990 if (TREE_CODE (arg) == COMPLEX_CST
6991 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6992 type, mpfr_hypot)))
6993 return res;
6995 if (TREE_CODE (arg) == COMPLEX_EXPR)
6997 tree real = TREE_OPERAND (arg, 0);
6998 tree imag = TREE_OPERAND (arg, 1);
7000 /* If either part is zero, cabs is fabs of the other. */
7001 if (real_zerop (real))
7002 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7003 if (real_zerop (imag))
7004 return fold_build1_loc (loc, ABS_EXPR, type, real);
7006 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7007 if (flag_unsafe_math_optimizations
7008 && operand_equal_p (real, imag, OEP_PURE_SAME))
7010 const REAL_VALUE_TYPE sqrt2_trunc
7011 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7012 STRIP_NOPS (real);
7013 return fold_build2_loc (loc, MULT_EXPR, type,
7014 fold_build1_loc (loc, ABS_EXPR, type, real),
7015 build_real (type, sqrt2_trunc));
7019 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7020 if (TREE_CODE (arg) == NEGATE_EXPR
7021 || TREE_CODE (arg) == CONJ_EXPR)
7022 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7024 /* Don't do this when optimizing for size. */
7025 if (flag_unsafe_math_optimizations
7026 && optimize && optimize_function_for_speed_p (cfun))
7028 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7030 if (sqrtfn != NULL_TREE)
7032 tree rpart, ipart, result;
7034 arg = builtin_save_expr (arg);
7036 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7037 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7039 rpart = builtin_save_expr (rpart);
7040 ipart = builtin_save_expr (ipart);
7042 result = fold_build2_loc (loc, PLUS_EXPR, type,
7043 fold_build2_loc (loc, MULT_EXPR, type,
7044 rpart, rpart),
7045 fold_build2_loc (loc, MULT_EXPR, type,
7046 ipart, ipart));
7048 return build_call_expr_loc (loc, sqrtfn, 1, result);
7052 return NULL_TREE;
7055 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7056 Return NULL_TREE if no simplification can be made. */
7058 static tree
7059 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7062 enum built_in_function fcode;
7063 tree res;
7065 if (!validate_arg (arg, REAL_TYPE))
7066 return NULL_TREE;
7068 /* Calculate the result when the argument is a constant. */
7069 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7070 return res;
7072 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7073 fcode = builtin_mathfn_code (arg);
7074 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7076 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7077 arg = fold_build2_loc (loc, MULT_EXPR, type,
7078 CALL_EXPR_ARG (arg, 0),
7079 build_real (type, dconsthalf));
7080 return build_call_expr_loc (loc, expfn, 1, arg);
7083 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7084 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7086 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7088 if (powfn)
7090 tree arg0 = CALL_EXPR_ARG (arg, 0);
7091 tree tree_root;
7092 /* The inner root was either sqrt or cbrt. */
7093 /* This was a conditional expression but it triggered a bug
7094 in Sun C 5.5. */
7095 REAL_VALUE_TYPE dconstroot;
7096 if (BUILTIN_SQRT_P (fcode))
7097 dconstroot = dconsthalf;
7098 else
7099 dconstroot = dconst_third ();
7101 /* Adjust for the outer root. */
7102 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7103 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7104 tree_root = build_real (type, dconstroot);
7105 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7109 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7110 if (flag_unsafe_math_optimizations
7111 && (fcode == BUILT_IN_POW
7112 || fcode == BUILT_IN_POWF
7113 || fcode == BUILT_IN_POWL))
7115 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7116 tree arg0 = CALL_EXPR_ARG (arg, 0);
7117 tree arg1 = CALL_EXPR_ARG (arg, 1);
7118 tree narg1;
7119 if (!tree_expr_nonnegative_p (arg0))
7120 arg0 = build1 (ABS_EXPR, type, arg0);
7121 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7122 build_real (type, dconsthalf));
7123 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7126 return NULL_TREE;
7129 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7130 Return NULL_TREE if no simplification can be made. */
7132 static tree
7133 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7135 const enum built_in_function fcode = builtin_mathfn_code (arg);
7136 tree res;
7138 if (!validate_arg (arg, REAL_TYPE))
7139 return NULL_TREE;
7141 /* Calculate the result when the argument is a constant. */
7142 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7143 return res;
7145 if (flag_unsafe_math_optimizations)
7147 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7148 if (BUILTIN_EXPONENT_P (fcode))
7150 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7151 const REAL_VALUE_TYPE third_trunc =
7152 real_value_truncate (TYPE_MODE (type), dconst_third ());
7153 arg = fold_build2_loc (loc, MULT_EXPR, type,
7154 CALL_EXPR_ARG (arg, 0),
7155 build_real (type, third_trunc));
7156 return build_call_expr_loc (loc, expfn, 1, arg);
7159 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7160 if (BUILTIN_SQRT_P (fcode))
7162 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7164 if (powfn)
7166 tree arg0 = CALL_EXPR_ARG (arg, 0);
7167 tree tree_root;
7168 REAL_VALUE_TYPE dconstroot = dconst_third ();
7170 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7171 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7172 tree_root = build_real (type, dconstroot);
7173 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7177 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7178 if (BUILTIN_CBRT_P (fcode))
7180 tree arg0 = CALL_EXPR_ARG (arg, 0);
7181 if (tree_expr_nonnegative_p (arg0))
7183 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7185 if (powfn)
7187 tree tree_root;
7188 REAL_VALUE_TYPE dconstroot;
7190 real_arithmetic (&dconstroot, MULT_EXPR,
7191 dconst_third_ptr (), dconst_third_ptr ());
7192 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7193 tree_root = build_real (type, dconstroot);
7194 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7199 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7200 if (fcode == BUILT_IN_POW
7201 || fcode == BUILT_IN_POWF
7202 || fcode == BUILT_IN_POWL)
7204 tree arg00 = CALL_EXPR_ARG (arg, 0);
7205 tree arg01 = CALL_EXPR_ARG (arg, 1);
7206 if (tree_expr_nonnegative_p (arg00))
7208 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7209 const REAL_VALUE_TYPE dconstroot
7210 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7211 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7212 build_real (type, dconstroot));
7213 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7217 return NULL_TREE;
7220 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7221 TYPE is the type of the return value. Return NULL_TREE if no
7222 simplification can be made. */
7224 static tree
7225 fold_builtin_cos (location_t loc,
7226 tree arg, tree type, tree fndecl)
7228 tree res, narg;
7230 if (!validate_arg (arg, REAL_TYPE))
7231 return NULL_TREE;
7233 /* Calculate the result when the argument is a constant. */
7234 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7235 return res;
7237 /* Optimize cos(-x) into cos (x). */
7238 if ((narg = fold_strip_sign_ops (arg)))
7239 return build_call_expr_loc (loc, fndecl, 1, narg);
7241 return NULL_TREE;
7244 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7245 Return NULL_TREE if no simplification can be made. */
7247 static tree
7248 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7250 if (validate_arg (arg, REAL_TYPE))
7252 tree res, narg;
7254 /* Calculate the result when the argument is a constant. */
7255 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7256 return res;
7258 /* Optimize cosh(-x) into cosh (x). */
7259 if ((narg = fold_strip_sign_ops (arg)))
7260 return build_call_expr_loc (loc, fndecl, 1, narg);
7263 return NULL_TREE;
7266 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7267 argument ARG. TYPE is the type of the return value. Return
7268 NULL_TREE if no simplification can be made. */
7270 static tree
7271 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7272 bool hyper)
7274 if (validate_arg (arg, COMPLEX_TYPE)
7275 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7277 tree tmp;
7279 /* Calculate the result when the argument is a constant. */
7280 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7281 return tmp;
7283 /* Optimize fn(-x) into fn(x). */
7284 if ((tmp = fold_strip_sign_ops (arg)))
7285 return build_call_expr_loc (loc, fndecl, 1, tmp);
7288 return NULL_TREE;
7291 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7292 Return NULL_TREE if no simplification can be made. */
7294 static tree
7295 fold_builtin_tan (tree arg, tree type)
7297 enum built_in_function fcode;
7298 tree res;
7300 if (!validate_arg (arg, REAL_TYPE))
7301 return NULL_TREE;
7303 /* Calculate the result when the argument is a constant. */
7304 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7305 return res;
7307 /* Optimize tan(atan(x)) = x. */
7308 fcode = builtin_mathfn_code (arg);
7309 if (flag_unsafe_math_optimizations
7310 && (fcode == BUILT_IN_ATAN
7311 || fcode == BUILT_IN_ATANF
7312 || fcode == BUILT_IN_ATANL))
7313 return CALL_EXPR_ARG (arg, 0);
7315 return NULL_TREE;
7318 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7319 NULL_TREE if no simplification can be made. */
7321 static tree
7322 fold_builtin_sincos (location_t loc,
7323 tree arg0, tree arg1, tree arg2)
7325 tree type;
7326 tree res, fn, call;
7328 if (!validate_arg (arg0, REAL_TYPE)
7329 || !validate_arg (arg1, POINTER_TYPE)
7330 || !validate_arg (arg2, POINTER_TYPE))
7331 return NULL_TREE;
7333 type = TREE_TYPE (arg0);
7335 /* Calculate the result when the argument is a constant. */
7336 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7337 return res;
7339 /* Canonicalize sincos to cexpi. */
7340 if (!TARGET_C99_FUNCTIONS)
7341 return NULL_TREE;
7342 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7343 if (!fn)
7344 return NULL_TREE;
7346 call = build_call_expr_loc (loc, fn, 1, arg0);
7347 call = builtin_save_expr (call);
7349 return build2 (COMPOUND_EXPR, void_type_node,
7350 build2 (MODIFY_EXPR, void_type_node,
7351 build_fold_indirect_ref_loc (loc, arg1),
7352 build1 (IMAGPART_EXPR, type, call)),
7353 build2 (MODIFY_EXPR, void_type_node,
7354 build_fold_indirect_ref_loc (loc, arg2),
7355 build1 (REALPART_EXPR, type, call)));
7358 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7359 NULL_TREE if no simplification can be made. */
7361 static tree
7362 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7364 tree rtype;
7365 tree realp, imagp, ifn;
7366 tree res;
7368 if (!validate_arg (arg0, COMPLEX_TYPE)
7369 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7370 return NULL_TREE;
7372 /* Calculate the result when the argument is a constant. */
7373 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7374 return res;
7376 rtype = TREE_TYPE (TREE_TYPE (arg0));
7378 /* In case we can figure out the real part of arg0 and it is constant zero
7379 fold to cexpi. */
7380 if (!TARGET_C99_FUNCTIONS)
7381 return NULL_TREE;
7382 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7383 if (!ifn)
7384 return NULL_TREE;
7386 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7387 && real_zerop (realp))
7389 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7390 return build_call_expr_loc (loc, ifn, 1, narg);
7393 /* In case we can easily decompose real and imaginary parts split cexp
7394 to exp (r) * cexpi (i). */
7395 if (flag_unsafe_math_optimizations
7396 && realp)
7398 tree rfn, rcall, icall;
7400 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7401 if (!rfn)
7402 return NULL_TREE;
7404 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7405 if (!imagp)
7406 return NULL_TREE;
7408 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7409 icall = builtin_save_expr (icall);
7410 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7411 rcall = builtin_save_expr (rcall);
7412 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7413 fold_build2_loc (loc, MULT_EXPR, rtype,
7414 rcall,
7415 fold_build1_loc (loc, REALPART_EXPR,
7416 rtype, icall)),
7417 fold_build2_loc (loc, MULT_EXPR, rtype,
7418 rcall,
7419 fold_build1_loc (loc, IMAGPART_EXPR,
7420 rtype, icall)));
7423 return NULL_TREE;
7426 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7427 Return NULL_TREE if no simplification can be made. */
7429 static tree
7430 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7432 if (!validate_arg (arg, REAL_TYPE))
7433 return NULL_TREE;
7435 /* Optimize trunc of constant value. */
7436 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7438 REAL_VALUE_TYPE r, x;
7439 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7441 x = TREE_REAL_CST (arg);
7442 real_trunc (&r, TYPE_MODE (type), &x);
7443 return build_real (type, r);
7446 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7449 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7450 Return NULL_TREE if no simplification can be made. */
7452 static tree
7453 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7455 if (!validate_arg (arg, REAL_TYPE))
7456 return NULL_TREE;
7458 /* Optimize floor of constant value. */
7459 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7461 REAL_VALUE_TYPE x;
7463 x = TREE_REAL_CST (arg);
7464 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7466 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7467 REAL_VALUE_TYPE r;
7469 real_floor (&r, TYPE_MODE (type), &x);
7470 return build_real (type, r);
7474 /* Fold floor (x) where x is nonnegative to trunc (x). */
7475 if (tree_expr_nonnegative_p (arg))
7477 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7478 if (truncfn)
7479 return build_call_expr_loc (loc, truncfn, 1, arg);
7482 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7485 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7486 Return NULL_TREE if no simplification can be made. */
7488 static tree
7489 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7491 if (!validate_arg (arg, REAL_TYPE))
7492 return NULL_TREE;
7494 /* Optimize ceil of constant value. */
7495 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7497 REAL_VALUE_TYPE x;
7499 x = TREE_REAL_CST (arg);
7500 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7502 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7503 REAL_VALUE_TYPE r;
7505 real_ceil (&r, TYPE_MODE (type), &x);
7506 return build_real (type, r);
7510 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7513 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7514 Return NULL_TREE if no simplification can be made. */
7516 static tree
7517 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7519 if (!validate_arg (arg, REAL_TYPE))
7520 return NULL_TREE;
7522 /* Optimize round of constant value. */
7523 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7525 REAL_VALUE_TYPE x;
7527 x = TREE_REAL_CST (arg);
7528 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7530 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7531 REAL_VALUE_TYPE r;
7533 real_round (&r, TYPE_MODE (type), &x);
7534 return build_real (type, r);
7538 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7541 /* Fold function call to builtin lround, lroundf or lroundl (or the
7542 corresponding long long versions) and other rounding functions. ARG
7543 is the argument to the call. Return NULL_TREE if no simplification
7544 can be made. */
7546 static tree
7547 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7549 if (!validate_arg (arg, REAL_TYPE))
7550 return NULL_TREE;
7552 /* Optimize lround of constant value. */
7553 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7555 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7557 if (real_isfinite (&x))
 /* ITYPE is the integer result type, FTYPE the floating argument type.  */
7559 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7560 tree ftype = TREE_TYPE (arg);
7561 unsigned HOST_WIDE_INT lo2;
7562 HOST_WIDE_INT hi, lo;
7563 REAL_VALUE_TYPE r;
 /* Round X according to which rounding builtin this is.  */
7565 switch (DECL_FUNCTION_CODE (fndecl))
7567 CASE_FLT_FN (BUILT_IN_LFLOOR):
7568 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7569 real_floor (&r, TYPE_MODE (ftype), &x);
7570 break;
7572 CASE_FLT_FN (BUILT_IN_LCEIL):
7573 CASE_FLT_FN (BUILT_IN_LLCEIL):
7574 real_ceil (&r, TYPE_MODE (ftype), &x);
7575 break;
7577 CASE_FLT_FN (BUILT_IN_LROUND):
7578 CASE_FLT_FN (BUILT_IN_LLROUND):
7579 real_round (&r, TYPE_MODE (ftype), &x);
7580 break;
7582 default:
7583 gcc_unreachable ();
 /* Convert the rounded value to a double-word integer and fold to a
    constant only when it fits the integer result type; otherwise fall
    through and return NULL (the result would be undefined anyway,
    but we do not fold it).  */
7586 REAL_VALUE_TO_INT (&lo, &hi, r);
7587 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7588 return build_int_cst_wide (itype, lo2, hi);
 /* Non-constant argument: only lfloor/llfloor of a nonnegative value
    can be simplified, to a plain float-to-integer truncation.  */
7592 switch (DECL_FUNCTION_CODE (fndecl))
7594 CASE_FLT_FN (BUILT_IN_LFLOOR):
7595 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7596 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7597 if (tree_expr_nonnegative_p (arg))
7598 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7599 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7600 break;
7601 default:;
7604 return fold_fixed_mathfn (loc, fndecl, arg);
7607 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7608 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7609 the argument to the call. Return NULL_TREE if no simplification can
7610 be made. */
7612 static tree
7613 fold_builtin_bitop (tree fndecl, tree arg)
7615 if (!validate_arg (arg, INTEGER_TYPE))
7616 return NULL_TREE;
7618 /* Optimize for constant argument. */
7619 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7621 HOST_WIDE_INT hi, width, result;
7622 unsigned HOST_WIDE_INT lo;
7623 tree type;
7625 type = TREE_TYPE (arg);
7626 width = TYPE_PRECISION (type);
 /* LO/HI hold the low and high words of the constant.  */
7627 lo = TREE_INT_CST_LOW (arg);
7629 /* Clear all the bits that are beyond the type's precision. */
7630 if (width > HOST_BITS_PER_WIDE_INT)
7632 hi = TREE_INT_CST_HIGH (arg);
7633 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7634 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7636 else
7638 hi = 0;
7639 if (width < HOST_BITS_PER_WIDE_INT)
7640 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7643 switch (DECL_FUNCTION_CODE (fndecl))
 /* ffs: index of the least-significant set bit, counting from 1;
    0 for a zero argument.  "lo & -lo" isolates that bit.  */
7645 CASE_INT_FN (BUILT_IN_FFS):
7646 if (lo != 0)
7647 result = exact_log2 (lo & -lo) + 1;
7648 else if (hi != 0)
7649 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7650 else
7651 result = 0;
7652 break;
 /* clz: leading-zero count; for a zero argument use the target's
    defined value if any, else the type's width.  */
7654 CASE_INT_FN (BUILT_IN_CLZ):
7655 if (hi != 0)
7656 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7657 else if (lo != 0)
7658 result = width - floor_log2 (lo) - 1;
7659 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7660 result = width;
7661 break;
 /* ctz: trailing-zero count, analogous to clz above.  */
7663 CASE_INT_FN (BUILT_IN_CTZ):
7664 if (lo != 0)
7665 result = exact_log2 (lo & -lo);
7666 else if (hi != 0)
7667 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7668 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7669 result = width;
7670 break;
 /* popcount: number of set bits, cleared one at a time via the
    "x &= x - 1" trick.  */
7672 CASE_INT_FN (BUILT_IN_POPCOUNT):
7673 result = 0;
7674 while (lo)
7675 result++, lo &= lo - 1;
7676 while (hi)
7677 result++, hi &= hi - 1;
7678 break;
 /* parity: popcount modulo 2.  */
7680 CASE_INT_FN (BUILT_IN_PARITY):
7681 result = 0;
7682 while (lo)
7683 result++, lo &= lo - 1;
7684 while (hi)
7685 result++, hi &= hi - 1;
7686 result &= 1;
7687 break;
7689 default:
7690 gcc_unreachable ();
7693 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7696 return NULL_TREE;
7699 /* Fold function call to builtin_bswap and the long and long long
7700 variants. Return NULL_TREE if no simplification can be made. */
7701 static tree
7702 fold_builtin_bswap (tree fndecl, tree arg)
7704 if (! validate_arg (arg, INTEGER_TYPE))
7705 return NULL_TREE;
7707 /* Optimize constant value. */
7708 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
 /* LO/HI are the low/high words of the input constant, R_LO/R_HI
    accumulate the byte-swapped result.  */
7710 HOST_WIDE_INT hi, width, r_hi = 0;
7711 unsigned HOST_WIDE_INT lo, r_lo = 0;
7712 tree type;
7714 type = TREE_TYPE (arg);
7715 width = TYPE_PRECISION (type);
7716 lo = TREE_INT_CST_LOW (arg);
7717 hi = TREE_INT_CST_HIGH (arg);
7719 switch (DECL_FUNCTION_CODE (fndecl))
7721 case BUILT_IN_BSWAP32:
7722 case BUILT_IN_BSWAP64:
7724 int s;
 /* Move the byte at bit position S to the mirrored position D,
    picking it out of whichever word (LO or HI) it lives in and
    depositing it into the matching result word.  */
7726 for (s = 0; s < width; s += 8)
7728 int d = width - s - 8;
7729 unsigned HOST_WIDE_INT byte;
7731 if (s < HOST_BITS_PER_WIDE_INT)
7732 byte = (lo >> s) & 0xff;
7733 else
7734 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7736 if (d < HOST_BITS_PER_WIDE_INT)
7737 r_lo |= byte << d;
7738 else
7739 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7743 break;
7745 default:
7746 gcc_unreachable ();
 /* A result narrower than a host wide int fits entirely in R_LO.  */
7749 if (width < HOST_BITS_PER_WIDE_INT)
7750 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7751 else
7752 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7755 return NULL_TREE;
7758 /* A subroutine of fold_builtin to fold the various logarithmic
7759 functions. Return NULL_TREE if no simplification can be made.
7760 FUNC is the corresponding MPFR logarithm function, and also
7761 identifies WHICH logarithm (log, log2 or log10) is being folded. */
7762 static tree
7763 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7764 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7766 if (validate_arg (arg, REAL_TYPE))
7768 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7769 tree res;
7770 const enum built_in_function fcode = builtin_mathfn_code (arg);
7772 /* Calculate the result when the argument is a constant. */
7773 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7774 return res;
7776 /* Special case, optimize logN(expN(x)) = x.  The MPFR function
7777 pointer is compared to pick the matching exp flavor. */
7777 if (flag_unsafe_math_optimizations
7778 && ((func == mpfr_log
7779 && (fcode == BUILT_IN_EXP
7780 || fcode == BUILT_IN_EXPF
7781 || fcode == BUILT_IN_EXPL))
7782 || (func == mpfr_log2
7783 && (fcode == BUILT_IN_EXP2
7784 || fcode == BUILT_IN_EXP2F
7785 || fcode == BUILT_IN_EXP2L))
7786 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7787 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7789 /* Optimize logN(func()) for various exponential functions. We
7790 want to determine the value "x" and the power "exponent" in
7791 order to transform logN(x**exponent) into exponent*logN(x). */
7792 if (flag_unsafe_math_optimizations)
7794 tree exponent = 0, x = 0;
7796 switch (fcode)
7798 CASE_FLT_FN (BUILT_IN_EXP):
7799 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7800 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7801 dconst_e ()));
7802 exponent = CALL_EXPR_ARG (arg, 0);
7803 break;
7804 CASE_FLT_FN (BUILT_IN_EXP2):
7805 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7806 x = build_real (type, dconst2);
7807 exponent = CALL_EXPR_ARG (arg, 0);
7808 break;
7809 CASE_FLT_FN (BUILT_IN_EXP10):
7810 CASE_FLT_FN (BUILT_IN_POW10):
7811 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7813 REAL_VALUE_TYPE dconst10;
7814 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7815 x = build_real (type, dconst10);
7817 exponent = CALL_EXPR_ARG (arg, 0);
7818 break;
7819 CASE_FLT_FN (BUILT_IN_SQRT):
7820 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7821 x = CALL_EXPR_ARG (arg, 0);
7822 exponent = build_real (type, dconsthalf);
7823 break;
7824 CASE_FLT_FN (BUILT_IN_CBRT):
7825 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7826 x = CALL_EXPR_ARG (arg, 0);
7827 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7828 dconst_third ()));
7829 break;
7830 CASE_FLT_FN (BUILT_IN_POW):
7831 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7832 x = CALL_EXPR_ARG (arg, 0);
7833 exponent = CALL_EXPR_ARG (arg, 1);
7834 break;
7835 default:
7836 break;
7839 /* Now perform the optimization: both X and EXPONENT must have
7840 been set by one of the cases above. */
7840 if (x && exponent)
7842 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7843 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7848 return NULL_TREE;
7851 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7852 NULL_TREE if no simplification can be made. */
7854 static tree
7855 fold_builtin_hypot (location_t loc, tree fndecl,
7856 tree arg0, tree arg1, tree type)
7858 tree res, narg0, narg1;
7860 if (!validate_arg (arg0, REAL_TYPE)
7861 || !validate_arg (arg1, REAL_TYPE))
7862 return NULL_TREE;
7864 /* Calculate the result when the argument is a constant. */
7865 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7866 return res;
7868 /* If either argument to hypot has a negate or abs, strip that off.
7869 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7870 narg0 = fold_strip_sign_ops (arg0);
7871 narg1 = fold_strip_sign_ops (arg1);
7872 if (narg0 || narg1)
7874 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7875 narg1 ? narg1 : arg1);
7878 /* If either argument is zero, hypot is fabs of the other. */
7879 if (real_zerop (arg0))
7880 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7881 else if (real_zerop (arg1))
7882 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7884 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7885 if (flag_unsafe_math_optimizations
7886 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7888 const REAL_VALUE_TYPE sqrt2_trunc
7889 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7890 return fold_build2_loc (loc, MULT_EXPR, type,
7891 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7892 build_real (type, sqrt2_trunc));
7895 return NULL_TREE;
7899 /* Fold a builtin function call to pow, powf, or powl. Return
7900 NULL_TREE if no simplification can be made. */
7901 static tree
7902 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7904 tree res;
7906 if (!validate_arg (arg0, REAL_TYPE)
7907 || !validate_arg (arg1, REAL_TYPE))
7908 return NULL_TREE;
7910 /* Calculate the result when the argument is a constant. */
7911 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7912 return res;
7914 /* Optimize pow(1.0,y) = 1.0. */
7915 if (real_onep (arg0))
7916 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7918 if (TREE_CODE (arg1) == REAL_CST
7919 && !TREE_OVERFLOW (arg1))
 /* C is the constant exponent; CINT and N are used below to test
    whether C is an exact integer.  */
7921 REAL_VALUE_TYPE cint;
7922 REAL_VALUE_TYPE c;
7923 HOST_WIDE_INT n;
7925 c = TREE_REAL_CST (arg1);
7927 /* Optimize pow(x,0.0) = 1.0. */
7928 if (REAL_VALUES_EQUAL (c, dconst0))
7929 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7930 arg0);
7932 /* Optimize pow(x,1.0) = x. */
7933 if (REAL_VALUES_EQUAL (c, dconst1))
7934 return arg0;
7936 /* Optimize pow(x,-1.0) = 1.0/x. */
7937 if (REAL_VALUES_EQUAL (c, dconstm1))
7938 return fold_build2_loc (loc, RDIV_EXPR, type,
7939 build_real (type, dconst1), arg0);
7941 /* Optimize pow(x,0.5) = sqrt(x). */
7942 if (flag_unsafe_math_optimizations
7943 && REAL_VALUES_EQUAL (c, dconsthalf))
7945 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7947 if (sqrtfn != NULL_TREE)
7948 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7951 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7952 if (flag_unsafe_math_optimizations)
7954 const REAL_VALUE_TYPE dconstroot
7955 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7957 if (REAL_VALUES_EQUAL (c, dconstroot))
7959 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7960 if (cbrtfn != NULL_TREE)
7961 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7965 /* Check for an integer exponent: round-trip C through an integer
7966 and compare bit-for-bit. */
7966 n = real_to_integer (&c);
7967 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7968 if (real_identical (&c, &cint))
7970 /* Attempt to evaluate pow at compile-time, unless this should
7971 raise an exception (a negative power of zero may trap or set
7972 errno, so it is only folded when those are disabled). */
7972 if (TREE_CODE (arg0) == REAL_CST
7973 && !TREE_OVERFLOW (arg0)
7974 && (n > 0
7975 || (!flag_trapping_math && !flag_errno_math)
7976 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7978 REAL_VALUE_TYPE x;
7979 bool inexact;
7981 x = TREE_REAL_CST (arg0);
7982 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
 /* Only fold an inexact result when exact math is not required.  */
7983 if (flag_unsafe_math_optimizations || !inexact)
7984 return build_real (type, x);
7987 /* Strip sign ops from even integer powers. */
7988 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7990 tree narg0 = fold_strip_sign_ops (arg0);
7991 if (narg0)
7992 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7997 if (flag_unsafe_math_optimizations)
7999 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8001 /* Optimize pow(expN(x),y) = expN(x*y). */
8002 if (BUILTIN_EXPONENT_P (fcode))
8004 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8005 tree arg = CALL_EXPR_ARG (arg0, 0);
8006 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8007 return build_call_expr_loc (loc, expfn, 1, arg);
8010 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8011 if (BUILTIN_SQRT_P (fcode))
8013 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8014 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8015 build_real (type, dconsthalf));
8016 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8019 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8020 if (BUILTIN_CBRT_P (fcode))
8022 tree arg = CALL_EXPR_ARG (arg0, 0);
8023 if (tree_expr_nonnegative_p (arg))
8025 const REAL_VALUE_TYPE dconstroot
8026 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8027 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8028 build_real (type, dconstroot));
8029 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8033 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8034 if (fcode == BUILT_IN_POW
8035 || fcode == BUILT_IN_POWF
8036 || fcode == BUILT_IN_POWL)
8038 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8039 if (tree_expr_nonnegative_p (arg00))
8041 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8042 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8043 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8048 return NULL_TREE;
8051 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8052 Return NULL_TREE if no simplification can be made. */
8053 static tree
8054 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8055 tree arg0, tree arg1, tree type)
8057 if (!validate_arg (arg0, REAL_TYPE)
8058 || !validate_arg (arg1, INTEGER_TYPE))
8059 return NULL_TREE;
8061 /* Optimize pow(1.0,y) = 1.0. */
8062 if (real_onep (arg0))
8063 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8065 if (host_integerp (arg1, 0))
8067 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8069 /* Evaluate powi at compile-time. */
8070 if (TREE_CODE (arg0) == REAL_CST
8071 && !TREE_OVERFLOW (arg0))
8073 REAL_VALUE_TYPE x;
8074 x = TREE_REAL_CST (arg0);
8075 real_powi (&x, TYPE_MODE (type), &x, c);
8076 return build_real (type, x);
8079 /* Optimize pow(x,0) = 1.0. */
8080 if (c == 0)
8081 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8082 arg0);
8084 /* Optimize pow(x,1) = x. */
8085 if (c == 1)
8086 return arg0;
8088 /* Optimize pow(x,-1) = 1.0/x. */
8089 if (c == -1)
8090 return fold_build2_loc (loc, RDIV_EXPR, type,
8091 build_real (type, dconst1), arg0);
8094 return NULL_TREE;
8097 /* A subroutine of fold_builtin to fold the various exponent
8098 functions. Return NULL_TREE if no simplification can be made.
8099 FUNC is the corresponding MPFR exponent function. */
8101 static tree
8102 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8103 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8105 if (validate_arg (arg, REAL_TYPE))
8107 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8108 tree res;
8110 /* Calculate the result when the argument is a constant. */
8111 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8112 return res;
8114 /* Optimize expN(logN(x)) = x. */
8115 if (flag_unsafe_math_optimizations)
8117 const enum built_in_function fcode = builtin_mathfn_code (arg);
8119 if ((func == mpfr_exp
8120 && (fcode == BUILT_IN_LOG
8121 || fcode == BUILT_IN_LOGF
8122 || fcode == BUILT_IN_LOGL))
8123 || (func == mpfr_exp2
8124 && (fcode == BUILT_IN_LOG2
8125 || fcode == BUILT_IN_LOG2F
8126 || fcode == BUILT_IN_LOG2L))
8127 || (func == mpfr_exp10
8128 && (fcode == BUILT_IN_LOG10
8129 || fcode == BUILT_IN_LOG10F
8130 || fcode == BUILT_IN_LOG10L)))
8131 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8135 return NULL_TREE;
8138 /* Return true if VAR is a VAR_DECL or a component thereof. */
8140 static bool
8141 var_decl_component_p (tree var)
8143 tree inner = var;
8144 while (handled_component_p (inner))
8145 inner = TREE_OPERAND (inner, 0);
8146 return SSA_VAR_P (inner);
8149 /* Fold function call to builtin memset. Return
8150 NULL_TREE if no simplification can be made. */
8152 static tree
8153 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8154 tree type, bool ignore)
8156 tree var, ret, etype;
8157 unsigned HOST_WIDE_INT length, cval;
8159 if (! validate_arg (dest, POINTER_TYPE)
8160 || ! validate_arg (c, INTEGER_TYPE)
8161 || ! validate_arg (len, INTEGER_TYPE))
8162 return NULL_TREE;
8164 if (! host_integerp (len, 1))
8165 return NULL_TREE;
8167 /* If the LEN parameter is zero, return DEST. */
8168 if (integer_zerop (len))
8169 return omit_one_operand_loc (loc, type, dest, c)
8171 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8172 return NULL_TREE;
 /* The remaining transformation replaces the memset with a single
    store, so DEST must be the address of a non-volatile decl (or a
    component of one) of integral or pointer type.  */
8174 var = dest;
8175 STRIP_NOPS (var);
8176 if (TREE_CODE (var) != ADDR_EXPR)
8177 return NULL_TREE;
8179 var = TREE_OPERAND (var, 0);
8180 if (TREE_THIS_VOLATILE (var))
8181 return NULL_TREE;
8183 etype = TREE_TYPE (var);
8184 if (TREE_CODE (etype) == ARRAY_TYPE)
8185 etype = TREE_TYPE (etype);
8187 if (!INTEGRAL_TYPE_P (etype)
8188 && !POINTER_TYPE_P (etype))
8189 return NULL_TREE;
8191 if (! var_decl_component_p (var))
8192 return NULL_TREE;
 /* The store must cover exactly the element type and DEST must be
    sufficiently aligned for it.  */
8194 length = tree_low_cst (len, 1);
8195 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8196 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8197 < (int) length)
8198 return NULL_TREE;
8200 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8201 return NULL_TREE;
8203 if (integer_zerop (c))
8204 cval = 0;
8205 else
8207 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8208 return NULL_TREE;
 /* Replicate the fill byte across the whole word.  The final shift
    is split as (cval << 31) << 1 so no single shift count reaches
    the width of a 32-bit HOST_WIDE_INT (which would be undefined
    behavior on such hosts).  */
8210 cval = tree_low_cst (c, 1);
8211 cval &= 0xff;
8212 cval |= cval << 8;
8213 cval |= cval << 16;
8214 cval |= (cval << 31) << 1;
 /* Build "*(etype *)dest = cval" and, unless the result is ignored,
    yield DEST as the value of the expression.  */
8217 ret = build_int_cst_type (etype, cval);
8218 var = build_fold_indirect_ref_loc (loc,
8219 fold_convert_loc (loc,
8220 build_pointer_type (etype),
8221 dest));
8222 ret = build2 (MODIFY_EXPR, etype, var, ret);
8223 if (ignore)
8224 return ret;
8226 return omit_one_operand_loc (loc, type, dest, ret);
8229 /* Fold function call to builtin memset. Return
8230 NULL_TREE if no simplification can be made. */
8232 static tree
8233 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8235 if (! validate_arg (dest, POINTER_TYPE)
8236 || ! validate_arg (size, INTEGER_TYPE))
8237 return NULL_TREE;
8239 if (!ignore)
8240 return NULL_TREE;
8242 /* New argument list transforming bzero(ptr x, int y) to
8243 memset(ptr x, int 0, size_t y). This is done this way
8244 so that if it isn't expanded inline, we fallback to
8245 calling bzero instead of memset. */
8247 return fold_builtin_memset (loc, dest, integer_zero_node,
8248 fold_convert_loc (loc, sizetype, size),
8249 void_type_node, ignore);
8252 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8253 NULL_TREE if no simplification can be made.
8254 If ENDP is 0, return DEST (like memcpy).
8255 If ENDP is 1, return DEST+LEN (like mempcpy).
8256 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8257 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8258 (memmove). */
8260 static tree
8261 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8262 tree len, tree type, bool ignore, int endp)
8264 tree destvar, srcvar, expr;
8266 if (! validate_arg (dest, POINTER_TYPE)
8267 || ! validate_arg (src, POINTER_TYPE)
8268 || ! validate_arg (len, INTEGER_TYPE))
8269 return NULL_TREE;
8271 /* If the LEN parameter is zero, return DEST. */
8272 if (integer_zerop (len))
8273 return omit_one_operand_loc (loc, type, dest, src);
8275 /* If SRC and DEST are the same (and not volatile), return
8276 DEST{,+LEN,+LEN-1}. */
8277 if (operand_equal_p (src, dest, 0))
8278 expr = len;
8279 else
8281 tree srctype, desttype;
8282 int src_align, dest_align;
8284 if (endp == 3)
8286 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8287 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8289 /* Both DEST and SRC must be pointer types.
8290 ??? This is what old code did. Is the testing for pointer types
8291 really mandatory?
8293 If either SRC is readonly or length is 1, we can use memcpy. */
8294 if (!dest_align || !src_align)
8295 return NULL_TREE;
8296 if (readonly_data_expr (src)
8297 || (host_integerp (len, 1)
8298 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8299 >= tree_low_cst (len, 1))))
8301 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8302 if (!fn)
8303 return NULL_TREE;
8304 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8307 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8308 srcvar = build_fold_indirect_ref_loc (loc, src);
8309 destvar = build_fold_indirect_ref_loc (loc, dest);
8310 if (srcvar
8311 && !TREE_THIS_VOLATILE (srcvar)
8312 && destvar
8313 && !TREE_THIS_VOLATILE (destvar))
8315 tree src_base, dest_base, fn;
8316 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8317 HOST_WIDE_INT size = -1;
8318 HOST_WIDE_INT maxsize = -1;
8320 src_base = srcvar;
8321 if (handled_component_p (src_base))
8322 src_base = get_ref_base_and_extent (src_base, &src_offset,
8323 &size, &maxsize);
8324 dest_base = destvar;
8325 if (handled_component_p (dest_base))
8326 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8327 &size, &maxsize);
8328 if (host_integerp (len, 1))
8330 maxsize = tree_low_cst (len, 1);
8331 if (maxsize
8332 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8333 maxsize = -1;
8334 else
8335 maxsize *= BITS_PER_UNIT;
8337 else
8338 maxsize = -1;
8339 if (SSA_VAR_P (src_base)
8340 && SSA_VAR_P (dest_base))
8342 if (operand_equal_p (src_base, dest_base, 0)
8343 && ranges_overlap_p (src_offset, maxsize,
8344 dest_offset, maxsize))
8345 return NULL_TREE;
8347 else if (TREE_CODE (src_base) == INDIRECT_REF
8348 && TREE_CODE (dest_base) == INDIRECT_REF)
8350 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8351 TREE_OPERAND (dest_base, 0), 0)
8352 || ranges_overlap_p (src_offset, maxsize,
8353 dest_offset, maxsize))
8354 return NULL_TREE;
8356 else
8357 return NULL_TREE;
8359 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8360 if (!fn)
8361 return NULL_TREE;
8362 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8364 return NULL_TREE;
8367 if (!host_integerp (len, 0))
8368 return NULL_TREE;
8369 /* FIXME:
8370 This logic lose for arguments like (type *)malloc (sizeof (type)),
8371 since we strip the casts of up to VOID return value from malloc.
8372 Perhaps we ought to inherit type from non-VOID argument here? */
8373 STRIP_NOPS (src);
8374 STRIP_NOPS (dest);
8375 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8376 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8378 tree tem = TREE_OPERAND (src, 0);
8379 STRIP_NOPS (tem);
8380 if (tem != TREE_OPERAND (src, 0))
8381 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8383 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8385 tree tem = TREE_OPERAND (dest, 0);
8386 STRIP_NOPS (tem);
8387 if (tem != TREE_OPERAND (dest, 0))
8388 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8390 srctype = TREE_TYPE (TREE_TYPE (src));
8391 if (srctype
8392 && TREE_CODE (srctype) == ARRAY_TYPE
8393 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8395 srctype = TREE_TYPE (srctype);
8396 STRIP_NOPS (src);
8397 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8399 desttype = TREE_TYPE (TREE_TYPE (dest));
8400 if (desttype
8401 && TREE_CODE (desttype) == ARRAY_TYPE
8402 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8404 desttype = TREE_TYPE (desttype);
8405 STRIP_NOPS (dest);
8406 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8408 if (!srctype || !desttype
8409 || !TYPE_SIZE_UNIT (srctype)
8410 || !TYPE_SIZE_UNIT (desttype)
8411 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8412 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8413 || TYPE_VOLATILE (srctype)
8414 || TYPE_VOLATILE (desttype))
8415 return NULL_TREE;
8417 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8418 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8419 if (dest_align < (int) TYPE_ALIGN (desttype)
8420 || src_align < (int) TYPE_ALIGN (srctype))
8421 return NULL_TREE;
8423 if (!ignore)
8424 dest = builtin_save_expr (dest);
8426 srcvar = NULL_TREE;
8427 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8429 srcvar = build_fold_indirect_ref_loc (loc, src);
8430 if (TREE_THIS_VOLATILE (srcvar))
8431 return NULL_TREE;
8432 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8433 srcvar = NULL_TREE;
8434 /* With memcpy, it is possible to bypass aliasing rules, so without
8435 this check i.e. execute/20060930-2.c would be misoptimized,
8436 because it use conflicting alias set to hold argument for the
8437 memcpy call. This check is probably unnecessary with
8438 -fno-strict-aliasing. Similarly for destvar. See also
8439 PR29286. */
8440 else if (!var_decl_component_p (srcvar))
8441 srcvar = NULL_TREE;
8444 destvar = NULL_TREE;
8445 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8447 destvar = build_fold_indirect_ref_loc (loc, dest);
8448 if (TREE_THIS_VOLATILE (destvar))
8449 return NULL_TREE;
8450 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8451 destvar = NULL_TREE;
8452 else if (!var_decl_component_p (destvar))
8453 destvar = NULL_TREE;
8456 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8457 return NULL_TREE;
8459 if (srcvar == NULL_TREE)
8461 tree srcptype;
8462 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8463 return NULL_TREE;
8465 srctype = build_qualified_type (desttype, 0);
8466 if (src_align < (int) TYPE_ALIGN (srctype))
8468 if (AGGREGATE_TYPE_P (srctype)
8469 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8470 return NULL_TREE;
8472 srctype = build_variant_type_copy (srctype);
8473 TYPE_ALIGN (srctype) = src_align;
8474 TYPE_USER_ALIGN (srctype) = 1;
8475 TYPE_PACKED (srctype) = 1;
8477 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8478 src = fold_convert_loc (loc, srcptype, src);
8479 srcvar = build_fold_indirect_ref_loc (loc, src);
8481 else if (destvar == NULL_TREE)
8483 tree destptype;
8484 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8485 return NULL_TREE;
8487 desttype = build_qualified_type (srctype, 0);
8488 if (dest_align < (int) TYPE_ALIGN (desttype))
8490 if (AGGREGATE_TYPE_P (desttype)
8491 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8492 return NULL_TREE;
8494 desttype = build_variant_type_copy (desttype);
8495 TYPE_ALIGN (desttype) = dest_align;
8496 TYPE_USER_ALIGN (desttype) = 1;
8497 TYPE_PACKED (desttype) = 1;
8499 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8500 dest = fold_convert_loc (loc, destptype, dest);
8501 destvar = build_fold_indirect_ref_loc (loc, dest);
8504 if (srctype == desttype
8505 || (gimple_in_ssa_p (cfun)
8506 && useless_type_conversion_p (desttype, srctype)))
8507 expr = srcvar;
8508 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8509 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8510 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8511 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8512 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8513 else
8514 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8515 TREE_TYPE (destvar), srcvar);
8516 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8519 if (ignore)
8520 return expr;
8522 if (endp == 0 || endp == 3)
8523 return omit_one_operand_loc (loc, type, dest, expr);
8525 if (expr == len)
8526 expr = NULL_TREE;
8528 if (endp == 2)
8529 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8530 ssize_int (1));
8532 len = fold_convert_loc (loc, sizetype, len);
8533 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8534 dest = fold_convert_loc (loc, type, dest);
8535 if (expr)
8536 dest = omit_one_operand_loc (loc, type, dest, expr);
8537 return dest;
8540 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8541 If LEN is not NULL, it represents the length of the string to be
8542 copied. Return NULL_TREE if no simplification can be made. */
8544 tree
8545 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8547 tree fn;
8549 if (!validate_arg (dest, POINTER_TYPE)
8550 || !validate_arg (src, POINTER_TYPE))
8551 return NULL_TREE;
8553 /* If SRC and DEST are the same (and not volatile), return DEST. */
8554 if (operand_equal_p (src, dest, 0))
8555 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8557 if (optimize_function_for_size_p (cfun))
8558 return NULL_TREE;
8560 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8561 if (!fn)
8562 return NULL_TREE;
8564 if (!len)
8566 len = c_strlen (src, 1);
8567 if (! len || TREE_SIDE_EFFECTS (len))
8568 return NULL_TREE;
8571 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8572 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8573 build_call_expr_loc (loc, fn, 3, dest, src, len));
8576 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8577 Return NULL_TREE if no simplification can be made. */
8579 static tree
8580 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8582 tree fn, len, lenp1, call, type;
8584 if (!validate_arg (dest, POINTER_TYPE)
8585 || !validate_arg (src, POINTER_TYPE))
8586 return NULL_TREE;
8588 len = c_strlen (src, 1);
8589 if (!len
8590 || TREE_CODE (len) != INTEGER_CST)
8591 return NULL_TREE;
8593 if (optimize_function_for_size_p (cfun)
8594 /* If length is zero it's small enough. */
8595 && !integer_zerop (len))
8596 return NULL_TREE;
8598 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8599 if (!fn)
8600 return NULL_TREE;
8602 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8603 /* We use dest twice in building our expression. Save it from
8604 multiple expansions. */
8605 dest = builtin_save_expr (dest);
8606 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8608 type = TREE_TYPE (TREE_TYPE (fndecl));
8609 len = fold_convert_loc (loc, sizetype, len);
8610 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8611 dest = fold_convert_loc (loc, type, dest);
8612 dest = omit_one_operand_loc (loc, type, dest, call);
8613 return dest;
8616 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8617 If SLEN is not NULL, it represents the length of the source string.
8618 Return NULL_TREE if no simplification can be made. */
8620 tree
8621 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8622 tree src, tree len, tree slen)
8624 tree fn;
8626 if (!validate_arg (dest, POINTER_TYPE)
8627 || !validate_arg (src, POINTER_TYPE)
8628 || !validate_arg (len, INTEGER_TYPE))
8629 return NULL_TREE;
8631 /* If the LEN parameter is zero, return DEST. */
8632 if (integer_zerop (len))
8633 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8635 /* We can't compare slen with len as constants below if len is not a
8636 constant. */
8637 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8638 return NULL_TREE;
8640 if (!slen)
8641 slen = c_strlen (src, 1);
8643 /* Now, we must be passed a constant src ptr parameter. */
8644 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8645 return NULL_TREE;
8647 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8649 /* We do not support simplification of this case, though we do
8650 support it when expanding trees into RTL. */
8651 /* FIXME: generate a call to __builtin_memset. */
8652 if (tree_int_cst_lt (slen, len))
8653 return NULL_TREE;
8655 /* OK transform into builtin memcpy. */
8656 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8657 if (!fn)
8658 return NULL_TREE;
8659 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8660 build_call_expr_loc (loc, fn, 3, dest, src, len));
8663 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8664 arguments to the call, and TYPE is its return type.
8665 Return NULL_TREE if no simplification can be made. */
8667 static tree
8668 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8670 if (!validate_arg (arg1, POINTER_TYPE)
8671 || !validate_arg (arg2, INTEGER_TYPE)
8672 || !validate_arg (len, INTEGER_TYPE))
8673 return NULL_TREE;
8674 else
8676 const char *p1;
8678 if (TREE_CODE (arg2) != INTEGER_CST
8679 || !host_integerp (len, 1))
8680 return NULL_TREE;
8682 p1 = c_getstr (arg1);
8683 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8685 char c;
8686 const char *r;
8687 tree tem;
8689 if (target_char_cast (arg2, &c))
8690 return NULL_TREE;
8692 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8694 if (r == NULL)
8695 return build_int_cst (TREE_TYPE (arg1), 0);
8697 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8698 size_int (r - p1));
8699 return fold_convert_loc (loc, type, tem);
8701 return NULL_TREE;
8705 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8706 Return NULL_TREE if no simplification can be made. */
8708 static tree
8709 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8711 const char *p1, *p2;
8713 if (!validate_arg (arg1, POINTER_TYPE)
8714 || !validate_arg (arg2, POINTER_TYPE)
8715 || !validate_arg (len, INTEGER_TYPE))
8716 return NULL_TREE;
8718 /* If the LEN parameter is zero, return zero. */
8719 if (integer_zerop (len))
8720 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8721 arg1, arg2);
8723 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8724 if (operand_equal_p (arg1, arg2, 0))
8725 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8727 p1 = c_getstr (arg1);
8728 p2 = c_getstr (arg2);
8730 /* If all arguments are constant, and the value of len is not greater
8731 than the lengths of arg1 and arg2, evaluate at compile-time. */
8732 if (host_integerp (len, 1) && p1 && p2
8733 && compare_tree_int (len, strlen (p1) + 1) <= 0
8734 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8736 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8738 if (r > 0)
8739 return integer_one_node;
8740 else if (r < 0)
8741 return integer_minus_one_node;
8742 else
8743 return integer_zero_node;
8746 /* If len parameter is one, return an expression corresponding to
8747 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8748 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8750 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8751 tree cst_uchar_ptr_node
8752 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8754 tree ind1
8755 = fold_convert_loc (loc, integer_type_node,
8756 build1 (INDIRECT_REF, cst_uchar_node,
8757 fold_convert_loc (loc,
8758 cst_uchar_ptr_node,
8759 arg1)));
8760 tree ind2
8761 = fold_convert_loc (loc, integer_type_node,
8762 build1 (INDIRECT_REF, cst_uchar_node,
8763 fold_convert_loc (loc,
8764 cst_uchar_ptr_node,
8765 arg2)));
8766 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8769 return NULL_TREE;
8772 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8773 Return NULL_TREE if no simplification can be made. */
8775 static tree
8776 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8778 const char *p1, *p2;
8780 if (!validate_arg (arg1, POINTER_TYPE)
8781 || !validate_arg (arg2, POINTER_TYPE))
8782 return NULL_TREE;
8784 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8785 if (operand_equal_p (arg1, arg2, 0))
8786 return integer_zero_node;
8788 p1 = c_getstr (arg1);
8789 p2 = c_getstr (arg2);
8791 if (p1 && p2)
8793 const int i = strcmp (p1, p2);
8794 if (i < 0)
8795 return integer_minus_one_node;
8796 else if (i > 0)
8797 return integer_one_node;
8798 else
8799 return integer_zero_node;
8802 /* If the second arg is "", return *(const unsigned char*)arg1. */
8803 if (p2 && *p2 == '\0')
8805 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8806 tree cst_uchar_ptr_node
8807 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8809 return fold_convert_loc (loc, integer_type_node,
8810 build1 (INDIRECT_REF, cst_uchar_node,
8811 fold_convert_loc (loc,
8812 cst_uchar_ptr_node,
8813 arg1)));
8816 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8817 if (p1 && *p1 == '\0')
8819 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8820 tree cst_uchar_ptr_node
8821 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8823 tree temp
8824 = fold_convert_loc (loc, integer_type_node,
8825 build1 (INDIRECT_REF, cst_uchar_node,
8826 fold_convert_loc (loc,
8827 cst_uchar_ptr_node,
8828 arg2)));
8829 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8832 return NULL_TREE;
8835 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8836 Return NULL_TREE if no simplification can be made. */
8838 static tree
8839 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8841 const char *p1, *p2;
8843 if (!validate_arg (arg1, POINTER_TYPE)
8844 || !validate_arg (arg2, POINTER_TYPE)
8845 || !validate_arg (len, INTEGER_TYPE))
8846 return NULL_TREE;
8848 /* If the LEN parameter is zero, return zero. */
8849 if (integer_zerop (len))
8850 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8851 arg1, arg2);
8853 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8854 if (operand_equal_p (arg1, arg2, 0))
8855 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8857 p1 = c_getstr (arg1);
8858 p2 = c_getstr (arg2);
8860 if (host_integerp (len, 1) && p1 && p2)
8862 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8863 if (i > 0)
8864 return integer_one_node;
8865 else if (i < 0)
8866 return integer_minus_one_node;
8867 else
8868 return integer_zero_node;
8871 /* If the second arg is "", and the length is greater than zero,
8872 return *(const unsigned char*)arg1. */
8873 if (p2 && *p2 == '\0'
8874 && TREE_CODE (len) == INTEGER_CST
8875 && tree_int_cst_sgn (len) == 1)
8877 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8878 tree cst_uchar_ptr_node
8879 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8881 return fold_convert_loc (loc, integer_type_node,
8882 build1 (INDIRECT_REF, cst_uchar_node,
8883 fold_convert_loc (loc,
8884 cst_uchar_ptr_node,
8885 arg1)));
8888 /* If the first arg is "", and the length is greater than zero,
8889 return -*(const unsigned char*)arg2. */
8890 if (p1 && *p1 == '\0'
8891 && TREE_CODE (len) == INTEGER_CST
8892 && tree_int_cst_sgn (len) == 1)
8894 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8895 tree cst_uchar_ptr_node
8896 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8898 tree temp = fold_convert_loc (loc, integer_type_node,
8899 build1 (INDIRECT_REF, cst_uchar_node,
8900 fold_convert_loc (loc,
8901 cst_uchar_ptr_node,
8902 arg2)));
8903 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8906 /* If len parameter is one, return an expression corresponding to
8907 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8908 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8910 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8911 tree cst_uchar_ptr_node
8912 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8914 tree ind1 = fold_convert_loc (loc, integer_type_node,
8915 build1 (INDIRECT_REF, cst_uchar_node,
8916 fold_convert_loc (loc,
8917 cst_uchar_ptr_node,
8918 arg1)));
8919 tree ind2 = fold_convert_loc (loc, integer_type_node,
8920 build1 (INDIRECT_REF, cst_uchar_node,
8921 fold_convert_loc (loc,
8922 cst_uchar_ptr_node,
8923 arg2)));
8924 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8927 return NULL_TREE;
8930 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8931 ARG. Return NULL_TREE if no simplification can be made. */
8933 static tree
8934 fold_builtin_signbit (location_t loc, tree arg, tree type)
8936 tree temp;
8938 if (!validate_arg (arg, REAL_TYPE))
8939 return NULL_TREE;
8941 /* If ARG is a compile-time constant, determine the result. */
8942 if (TREE_CODE (arg) == REAL_CST
8943 && !TREE_OVERFLOW (arg))
8945 REAL_VALUE_TYPE c;
8947 c = TREE_REAL_CST (arg);
8948 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8949 return fold_convert_loc (loc, type, temp);
8952 /* If ARG is non-negative, the result is always zero. */
8953 if (tree_expr_nonnegative_p (arg))
8954 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8956 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8957 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8958 return fold_build2_loc (loc, LT_EXPR, type, arg,
8959 build_real (TREE_TYPE (arg), dconst0));
8961 return NULL_TREE;
8964 /* Fold function call to builtin copysign, copysignf or copysignl with
8965 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8966 be made. */
8968 static tree
8969 fold_builtin_copysign (location_t loc, tree fndecl,
8970 tree arg1, tree arg2, tree type)
8972 tree tem;
8974 if (!validate_arg (arg1, REAL_TYPE)
8975 || !validate_arg (arg2, REAL_TYPE))
8976 return NULL_TREE;
8978 /* copysign(X,X) is X. */
8979 if (operand_equal_p (arg1, arg2, 0))
8980 return fold_convert_loc (loc, type, arg1);
8982 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8983 if (TREE_CODE (arg1) == REAL_CST
8984 && TREE_CODE (arg2) == REAL_CST
8985 && !TREE_OVERFLOW (arg1)
8986 && !TREE_OVERFLOW (arg2))
8988 REAL_VALUE_TYPE c1, c2;
8990 c1 = TREE_REAL_CST (arg1);
8991 c2 = TREE_REAL_CST (arg2);
8992 /* c1.sign := c2.sign. */
8993 real_copysign (&c1, &c2);
8994 return build_real (type, c1);
8997 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8998 Remember to evaluate Y for side-effects. */
8999 if (tree_expr_nonnegative_p (arg2))
9000 return omit_one_operand_loc (loc, type,
9001 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9002 arg2);
9004 /* Strip sign changing operations for the first argument. */
9005 tem = fold_strip_sign_ops (arg1);
9006 if (tem)
9007 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9009 return NULL_TREE;
9012 /* Fold a call to builtin isascii with argument ARG. */
9014 static tree
9015 fold_builtin_isascii (location_t loc, tree arg)
9017 if (!validate_arg (arg, INTEGER_TYPE))
9018 return NULL_TREE;
9019 else
9021 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9022 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9023 build_int_cst (NULL_TREE,
9024 ~ (unsigned HOST_WIDE_INT) 0x7f));
9025 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9026 arg, integer_zero_node);
9030 /* Fold a call to builtin toascii with argument ARG. */
9032 static tree
9033 fold_builtin_toascii (location_t loc, tree arg)
9035 if (!validate_arg (arg, INTEGER_TYPE))
9036 return NULL_TREE;
9038 /* Transform toascii(c) -> (c & 0x7f). */
9039 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9040 build_int_cst (NULL_TREE, 0x7f));
9043 /* Fold a call to builtin isdigit with argument ARG. */
9045 static tree
9046 fold_builtin_isdigit (location_t loc, tree arg)
9048 if (!validate_arg (arg, INTEGER_TYPE))
9049 return NULL_TREE;
9050 else
9052 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9053 /* According to the C standard, isdigit is unaffected by locale.
9054 However, it definitely is affected by the target character set. */
9055 unsigned HOST_WIDE_INT target_digit0
9056 = lang_hooks.to_target_charset ('0');
9058 if (target_digit0 == 0)
9059 return NULL_TREE;
9061 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9062 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9063 build_int_cst (unsigned_type_node, target_digit0));
9064 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9065 build_int_cst (unsigned_type_node, 9));
9069 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9071 static tree
9072 fold_builtin_fabs (location_t loc, tree arg, tree type)
9074 if (!validate_arg (arg, REAL_TYPE))
9075 return NULL_TREE;
9077 arg = fold_convert_loc (loc, type, arg);
9078 if (TREE_CODE (arg) == REAL_CST)
9079 return fold_abs_const (arg, type);
9080 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9083 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9085 static tree
9086 fold_builtin_abs (location_t loc, tree arg, tree type)
9088 if (!validate_arg (arg, INTEGER_TYPE))
9089 return NULL_TREE;
9091 arg = fold_convert_loc (loc, type, arg);
9092 if (TREE_CODE (arg) == INTEGER_CST)
9093 return fold_abs_const (arg, type);
9094 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9097 /* Fold a call to builtin fmin or fmax. */
9099 static tree
9100 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9101 tree type, bool max)
9103 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9105 /* Calculate the result when the argument is a constant. */
9106 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9108 if (res)
9109 return res;
9111 /* If either argument is NaN, return the other one. Avoid the
9112 transformation if we get (and honor) a signalling NaN. Using
9113 omit_one_operand() ensures we create a non-lvalue. */
9114 if (TREE_CODE (arg0) == REAL_CST
9115 && real_isnan (&TREE_REAL_CST (arg0))
9116 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9117 || ! TREE_REAL_CST (arg0).signalling))
9118 return omit_one_operand_loc (loc, type, arg1, arg0);
9119 if (TREE_CODE (arg1) == REAL_CST
9120 && real_isnan (&TREE_REAL_CST (arg1))
9121 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9122 || ! TREE_REAL_CST (arg1).signalling))
9123 return omit_one_operand_loc (loc, type, arg0, arg1);
9125 /* Transform fmin/fmax(x,x) -> x. */
9126 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9127 return omit_one_operand_loc (loc, type, arg0, arg1);
9129 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9130 functions to return the numeric arg if the other one is NaN.
9131 These tree codes don't honor that, so only transform if
9132 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9133 handled, so we don't have to worry about it either. */
9134 if (flag_finite_math_only)
9135 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9136 fold_convert_loc (loc, type, arg0),
9137 fold_convert_loc (loc, type, arg1));
9139 return NULL_TREE;
9142 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9144 static tree
9145 fold_builtin_carg (location_t loc, tree arg, tree type)
9147 if (validate_arg (arg, COMPLEX_TYPE)
9148 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9150 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9152 if (atan2_fn)
9154 tree new_arg = builtin_save_expr (arg);
9155 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9156 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9157 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9161 return NULL_TREE;
9164 /* Fold a call to builtin logb/ilogb. */
9166 static tree
9167 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9169 if (! validate_arg (arg, REAL_TYPE))
9170 return NULL_TREE;
9172 STRIP_NOPS (arg);
9174 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9176 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9178 switch (value->cl)
9180 case rvc_nan:
9181 case rvc_inf:
9182 /* If arg is Inf or NaN and we're logb, return it. */
9183 if (TREE_CODE (rettype) == REAL_TYPE)
9184 return fold_convert_loc (loc, rettype, arg);
9185 /* Fall through... */
9186 case rvc_zero:
9187 /* Zero may set errno and/or raise an exception for logb, also
9188 for ilogb we don't know FP_ILOGB0. */
9189 return NULL_TREE;
9190 case rvc_normal:
9191 /* For normal numbers, proceed iff radix == 2. In GCC,
9192 normalized significands are in the range [0.5, 1.0). We
9193 want the exponent as if they were [1.0, 2.0) so get the
9194 exponent and subtract 1. */
9195 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9196 return fold_convert_loc (loc, rettype,
9197 build_int_cst (NULL_TREE,
9198 REAL_EXP (value)-1));
9199 break;
9203 return NULL_TREE;
9206 /* Fold a call to builtin significand, if radix == 2. */
9208 static tree
9209 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9211 if (! validate_arg (arg, REAL_TYPE))
9212 return NULL_TREE;
9214 STRIP_NOPS (arg);
9216 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9218 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9220 switch (value->cl)
9222 case rvc_zero:
9223 case rvc_nan:
9224 case rvc_inf:
9225 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9226 return fold_convert_loc (loc, rettype, arg);
9227 case rvc_normal:
9228 /* For normal numbers, proceed iff radix == 2. */
9229 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9231 REAL_VALUE_TYPE result = *value;
9232 /* In GCC, normalized significands are in the range [0.5,
9233 1.0). We want them to be [1.0, 2.0) so set the
9234 exponent to 1. */
9235 SET_REAL_EXP (&result, 1);
9236 return build_real (rettype, result);
9238 break;
9242 return NULL_TREE;
9245 /* Fold a call to builtin frexp, we can assume the base is 2. */
9247 static tree
9248 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9250 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9251 return NULL_TREE;
9253 STRIP_NOPS (arg0);
9255 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9256 return NULL_TREE;
9258 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9260 /* Proceed if a valid pointer type was passed in. */
9261 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9263 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9264 tree frac, exp;
9266 switch (value->cl)
9268 case rvc_zero:
9269 /* For +-0, return (*exp = 0, +-0). */
9270 exp = integer_zero_node;
9271 frac = arg0;
9272 break;
9273 case rvc_nan:
9274 case rvc_inf:
9275 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9276 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9277 case rvc_normal:
9279 /* Since the frexp function always expects base 2, and in
9280 GCC normalized significands are already in the range
9281 [0.5, 1.0), we have exactly what frexp wants. */
9282 REAL_VALUE_TYPE frac_rvt = *value;
9283 SET_REAL_EXP (&frac_rvt, 0);
9284 frac = build_real (rettype, frac_rvt);
9285 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9287 break;
9288 default:
9289 gcc_unreachable ();
9292 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9293 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9294 TREE_SIDE_EFFECTS (arg1) = 1;
9295 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9298 return NULL_TREE;
9301 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9302 then we can assume the base is two. If it's false, then we have to
9303 check the mode of the TYPE parameter in certain cases. */
9305 static tree
9306 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9307 tree type, bool ldexp)
9309 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9311 STRIP_NOPS (arg0);
9312 STRIP_NOPS (arg1);
9314 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9315 if (real_zerop (arg0) || integer_zerop (arg1)
9316 || (TREE_CODE (arg0) == REAL_CST
9317 && !real_isfinite (&TREE_REAL_CST (arg0))))
9318 return omit_one_operand_loc (loc, type, arg0, arg1);
9320 /* If both arguments are constant, then try to evaluate it. */
9321 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9322 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9323 && host_integerp (arg1, 0))
9325 /* Bound the maximum adjustment to twice the range of the
9326 mode's valid exponents. Use abs to ensure the range is
9327 positive as a sanity check. */
9328 const long max_exp_adj = 2 *
9329 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9330 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9332 /* Get the user-requested adjustment. */
9333 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9335 /* The requested adjustment must be inside this range. This
9336 is a preliminary cap to avoid things like overflow, we
9337 may still fail to compute the result for other reasons. */
9338 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9340 REAL_VALUE_TYPE initial_result;
9342 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9344 /* Ensure we didn't overflow. */
9345 if (! real_isinf (&initial_result))
9347 const REAL_VALUE_TYPE trunc_result
9348 = real_value_truncate (TYPE_MODE (type), initial_result);
9350 /* Only proceed if the target mode can hold the
9351 resulting value. */
9352 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9353 return build_real (type, trunc_result);
9359 return NULL_TREE;
9362 /* Fold a call to builtin modf. */
9364 static tree
9365 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9367 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9368 return NULL_TREE;
9370 STRIP_NOPS (arg0);
9372 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9373 return NULL_TREE;
9375 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9377 /* Proceed if a valid pointer type was passed in. */
9378 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9380 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9381 REAL_VALUE_TYPE trunc, frac;
9383 switch (value->cl)
9385 case rvc_nan:
9386 case rvc_zero:
9387 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9388 trunc = frac = *value;
9389 break;
9390 case rvc_inf:
9391 /* For +-Inf, return (*arg1 = arg0, +-0). */
9392 frac = dconst0;
9393 frac.sign = value->sign;
9394 trunc = *value;
9395 break;
9396 case rvc_normal:
9397 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9398 real_trunc (&trunc, VOIDmode, value);
9399 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9400 /* If the original number was negative and already
9401 integral, then the fractional part is -0.0. */
9402 if (value->sign && frac.cl == rvc_zero)
9403 frac.sign = value->sign;
9404 break;
9407 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9408 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9409 build_real (rettype, trunc));
9410 TREE_SIDE_EFFECTS (arg1) = 1;
9411 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9412 build_real (rettype, frac));
9415 return NULL_TREE;
9418 /* Given a location LOC, an interclass builtin function decl FNDECL
9419 and its single argument ARG, return an folded expression computing
9420 the same, or NULL_TREE if we either couldn't or didn't want to fold
9421 (the latter happen if there's an RTL instruction available). */
9423 static tree
9424 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9426 enum machine_mode mode;
9428 if (!validate_arg (arg, REAL_TYPE))
9429 return NULL_TREE;
9431 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9432 return NULL_TREE;
9434 mode = TYPE_MODE (TREE_TYPE (arg));
9436 /* If there is no optab, try generic code. */
9437 switch (DECL_FUNCTION_CODE (fndecl))
9439 tree result;
9441 CASE_FLT_FN (BUILT_IN_ISINF):
9443 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9444 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9445 tree const type = TREE_TYPE (arg);
9446 REAL_VALUE_TYPE r;
9447 char buf[128];
9449 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9450 real_from_string (&r, buf);
9451 result = build_call_expr (isgr_fn, 2,
9452 fold_build1_loc (loc, ABS_EXPR, type, arg),
9453 build_real (type, r));
9454 return result;
9456 CASE_FLT_FN (BUILT_IN_FINITE):
9457 case BUILT_IN_ISFINITE:
9459 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9460 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9461 tree const type = TREE_TYPE (arg);
9462 REAL_VALUE_TYPE r;
9463 char buf[128];
9465 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9466 real_from_string (&r, buf);
9467 result = build_call_expr (isle_fn, 2,
9468 fold_build1_loc (loc, ABS_EXPR, type, arg),
9469 build_real (type, r));
9470 /*result = fold_build2_loc (loc, UNGT_EXPR,
9471 TREE_TYPE (TREE_TYPE (fndecl)),
9472 fold_build1_loc (loc, ABS_EXPR, type, arg),
9473 build_real (type, r));
9474 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9475 TREE_TYPE (TREE_TYPE (fndecl)),
9476 result);*/
9477 return result;
9479 case BUILT_IN_ISNORMAL:
9481 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9482 islessequal(fabs(x),DBL_MAX). */
9483 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9484 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9485 tree const type = TREE_TYPE (arg);
9486 REAL_VALUE_TYPE rmax, rmin;
9487 char buf[128];
9489 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9490 real_from_string (&rmax, buf);
9491 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9492 real_from_string (&rmin, buf);
9493 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9494 result = build_call_expr (isle_fn, 2, arg,
9495 build_real (type, rmax));
9496 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9497 build_call_expr (isge_fn, 2, arg,
9498 build_real (type, rmin)));
9499 return result;
9501 default:
9502 break;
9505 return NULL_TREE;
9508 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9509 ARG is the argument for the call. */
9511 static tree
9512 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9514 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9515 REAL_VALUE_TYPE r;
9517 if (!validate_arg (arg, REAL_TYPE))
9518 return NULL_TREE;
9520 switch (builtin_index)
9522 case BUILT_IN_ISINF:
9523 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9524 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9526 if (TREE_CODE (arg) == REAL_CST)
9528 r = TREE_REAL_CST (arg);
9529 if (real_isinf (&r))
9530 return real_compare (GT_EXPR, &r, &dconst0)
9531 ? integer_one_node : integer_minus_one_node;
9532 else
9533 return integer_zero_node;
9536 return NULL_TREE;
9538 case BUILT_IN_ISINF_SIGN:
9540 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9541 /* In a boolean context, GCC will fold the inner COND_EXPR to
9542 1. So e.g. "if (isinf_sign(x))" would be folded to just
9543 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9544 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9545 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9546 tree tmp = NULL_TREE;
9548 arg = builtin_save_expr (arg);
9550 if (signbit_fn && isinf_fn)
9552 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9553 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9555 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9556 signbit_call, integer_zero_node);
9557 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9558 isinf_call, integer_zero_node);
9560 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9561 integer_minus_one_node, integer_one_node);
9562 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9563 isinf_call, tmp,
9564 integer_zero_node);
9567 return tmp;
9570 case BUILT_IN_ISFINITE:
9571 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9572 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9573 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9575 if (TREE_CODE (arg) == REAL_CST)
9577 r = TREE_REAL_CST (arg);
9578 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9581 return NULL_TREE;
9583 case BUILT_IN_ISNAN:
9584 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9585 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9587 if (TREE_CODE (arg) == REAL_CST)
9589 r = TREE_REAL_CST (arg);
9590 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9593 arg = builtin_save_expr (arg);
9594 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9596 default:
9597 gcc_unreachable ();
9601 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9602 This builtin will generate code to return the appropriate floating
9603 point classification depending on the value of the floating point
9604 number passed in. The possible return values must be supplied as
9605 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9606 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9607 one floating point argument which is "type generic". */
9609 static tree
9610 fold_builtin_fpclassify (location_t loc, tree exp)
9612 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9613 arg, type, res, tmp;
9614 enum machine_mode mode;
9615 REAL_VALUE_TYPE r;
9616 char buf[128];
9618 /* Verify the required arguments in the original call. */
9619 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9620 INTEGER_TYPE, INTEGER_TYPE,
9621 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9622 return NULL_TREE;
9624 fp_nan = CALL_EXPR_ARG (exp, 0);
9625 fp_infinite = CALL_EXPR_ARG (exp, 1);
9626 fp_normal = CALL_EXPR_ARG (exp, 2);
9627 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9628 fp_zero = CALL_EXPR_ARG (exp, 4);
9629 arg = CALL_EXPR_ARG (exp, 5);
9630 type = TREE_TYPE (arg);
9631 mode = TYPE_MODE (type);
9632 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9634 /* fpclassify(x) ->
9635 isnan(x) ? FP_NAN :
9636 (fabs(x) == Inf ? FP_INFINITE :
9637 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9638 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9640 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9641 build_real (type, dconst0));
9642 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9643 tmp, fp_zero, fp_subnormal);
9645 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9646 real_from_string (&r, buf);
9647 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9648 arg, build_real (type, r));
9649 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9651 if (HONOR_INFINITIES (mode))
9653 real_inf (&r);
9654 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9655 build_real (type, r));
9656 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9657 fp_infinite, res);
9660 if (HONOR_NANS (mode))
9662 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9663 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9666 return res;
9669 /* Fold a call to an unordered comparison function such as
9670 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9671 being called and ARG0 and ARG1 are the arguments for the call.
9672 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9673 the opposite of the desired result. UNORDERED_CODE is used
9674 for modes that can hold NaNs and ORDERED_CODE is used for
9675 the rest. */
9677 static tree
9678 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9679 enum tree_code unordered_code,
9680 enum tree_code ordered_code)
9682 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9683 enum tree_code code;
9684 tree type0, type1;
9685 enum tree_code code0, code1;
9686 tree cmp_type = NULL_TREE;
9688 type0 = TREE_TYPE (arg0);
9689 type1 = TREE_TYPE (arg1);
9691 code0 = TREE_CODE (type0);
9692 code1 = TREE_CODE (type1);
9694 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9695 /* Choose the wider of two real types. */
9696 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9697 ? type0 : type1;
9698 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9699 cmp_type = type0;
9700 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9701 cmp_type = type1;
9703 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9704 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9706 if (unordered_code == UNORDERED_EXPR)
9708 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9709 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9710 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9713 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9714 : ordered_code;
9715 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9716 fold_build2_loc (loc, code, type, arg0, arg1));
9719 /* Fold a call to built-in function FNDECL with 0 arguments.
9720 IGNORE is true if the result of the function call is ignored. This
9721 function returns NULL_TREE if no simplification was possible. */
9723 static tree
9724 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9726 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9727 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9728 switch (fcode)
9730 CASE_FLT_FN (BUILT_IN_INF):
9731 case BUILT_IN_INFD32:
9732 case BUILT_IN_INFD64:
9733 case BUILT_IN_INFD128:
9734 return fold_builtin_inf (loc, type, true);
9736 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9737 return fold_builtin_inf (loc, type, false);
9739 case BUILT_IN_CLASSIFY_TYPE:
9740 return fold_builtin_classify_type (NULL_TREE);
9742 default:
9743 break;
9745 return NULL_TREE;
9748 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9749 IGNORE is true if the result of the function call is ignored. This
9750 function returns NULL_TREE if no simplification was possible. */
9752 static tree
9753 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9755 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9756 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9757 switch (fcode)
9760 case BUILT_IN_CONSTANT_P:
9762 tree val = fold_builtin_constant_p (arg0);
9764 /* Gimplification will pull the CALL_EXPR for the builtin out of
9765 an if condition. When not optimizing, we'll not CSE it back.
9766 To avoid link error types of regressions, return false now. */
9767 if (!val && !optimize)
9768 val = integer_zero_node;
9770 return val;
9773 case BUILT_IN_CLASSIFY_TYPE:
9774 return fold_builtin_classify_type (arg0);
9776 case BUILT_IN_STRLEN:
9777 return fold_builtin_strlen (loc, type, arg0);
9779 CASE_FLT_FN (BUILT_IN_FABS):
9780 return fold_builtin_fabs (loc, arg0, type);
9782 case BUILT_IN_ABS:
9783 case BUILT_IN_LABS:
9784 case BUILT_IN_LLABS:
9785 case BUILT_IN_IMAXABS:
9786 return fold_builtin_abs (loc, arg0, type);
9788 CASE_FLT_FN (BUILT_IN_CONJ):
9789 if (validate_arg (arg0, COMPLEX_TYPE)
9790 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9791 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9792 break;
9794 CASE_FLT_FN (BUILT_IN_CREAL):
9795 if (validate_arg (arg0, COMPLEX_TYPE)
9796 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9797 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9798 break;
9800 CASE_FLT_FN (BUILT_IN_CIMAG):
9801 if (validate_arg (arg0, COMPLEX_TYPE)
9802 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9803 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9804 break;
9806 CASE_FLT_FN (BUILT_IN_CCOS):
9807 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9809 CASE_FLT_FN (BUILT_IN_CCOSH):
9810 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9812 CASE_FLT_FN (BUILT_IN_CSIN):
9813 if (validate_arg (arg0, COMPLEX_TYPE)
9814 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9815 return do_mpc_arg1 (arg0, type, mpc_sin);
9816 break;
9818 CASE_FLT_FN (BUILT_IN_CSINH):
9819 if (validate_arg (arg0, COMPLEX_TYPE)
9820 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9821 return do_mpc_arg1 (arg0, type, mpc_sinh);
9822 break;
9824 CASE_FLT_FN (BUILT_IN_CTAN):
9825 if (validate_arg (arg0, COMPLEX_TYPE)
9826 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9827 return do_mpc_arg1 (arg0, type, mpc_tan);
9828 break;
9830 CASE_FLT_FN (BUILT_IN_CTANH):
9831 if (validate_arg (arg0, COMPLEX_TYPE)
9832 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9833 return do_mpc_arg1 (arg0, type, mpc_tanh);
9834 break;
9836 CASE_FLT_FN (BUILT_IN_CLOG):
9837 if (validate_arg (arg0, COMPLEX_TYPE)
9838 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9839 return do_mpc_arg1 (arg0, type, mpc_log);
9840 break;
9842 CASE_FLT_FN (BUILT_IN_CSQRT):
9843 if (validate_arg (arg0, COMPLEX_TYPE)
9844 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9845 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9846 break;
9848 CASE_FLT_FN (BUILT_IN_CASIN):
9849 if (validate_arg (arg0, COMPLEX_TYPE)
9850 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9851 return do_mpc_arg1 (arg0, type, mpc_asin);
9852 break;
9854 CASE_FLT_FN (BUILT_IN_CACOS):
9855 if (validate_arg (arg0, COMPLEX_TYPE)
9856 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9857 return do_mpc_arg1 (arg0, type, mpc_acos);
9858 break;
9860 CASE_FLT_FN (BUILT_IN_CATAN):
9861 if (validate_arg (arg0, COMPLEX_TYPE)
9862 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9863 return do_mpc_arg1 (arg0, type, mpc_atan);
9864 break;
9866 CASE_FLT_FN (BUILT_IN_CASINH):
9867 if (validate_arg (arg0, COMPLEX_TYPE)
9868 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9869 return do_mpc_arg1 (arg0, type, mpc_asinh);
9870 break;
9872 CASE_FLT_FN (BUILT_IN_CACOSH):
9873 if (validate_arg (arg0, COMPLEX_TYPE)
9874 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9875 return do_mpc_arg1 (arg0, type, mpc_acosh);
9876 break;
9878 CASE_FLT_FN (BUILT_IN_CATANH):
9879 if (validate_arg (arg0, COMPLEX_TYPE)
9880 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9881 return do_mpc_arg1 (arg0, type, mpc_atanh);
9882 break;
9884 CASE_FLT_FN (BUILT_IN_CABS):
9885 return fold_builtin_cabs (loc, arg0, type, fndecl);
9887 CASE_FLT_FN (BUILT_IN_CARG):
9888 return fold_builtin_carg (loc, arg0, type);
9890 CASE_FLT_FN (BUILT_IN_SQRT):
9891 return fold_builtin_sqrt (loc, arg0, type);
9893 CASE_FLT_FN (BUILT_IN_CBRT):
9894 return fold_builtin_cbrt (loc, arg0, type);
9896 CASE_FLT_FN (BUILT_IN_ASIN):
9897 if (validate_arg (arg0, REAL_TYPE))
9898 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9899 &dconstm1, &dconst1, true);
9900 break;
9902 CASE_FLT_FN (BUILT_IN_ACOS):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9905 &dconstm1, &dconst1, true);
9906 break;
9908 CASE_FLT_FN (BUILT_IN_ATAN):
9909 if (validate_arg (arg0, REAL_TYPE))
9910 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9911 break;
9913 CASE_FLT_FN (BUILT_IN_ASINH):
9914 if (validate_arg (arg0, REAL_TYPE))
9915 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9916 break;
9918 CASE_FLT_FN (BUILT_IN_ACOSH):
9919 if (validate_arg (arg0, REAL_TYPE))
9920 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9921 &dconst1, NULL, true);
9922 break;
9924 CASE_FLT_FN (BUILT_IN_ATANH):
9925 if (validate_arg (arg0, REAL_TYPE))
9926 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9927 &dconstm1, &dconst1, false);
9928 break;
9930 CASE_FLT_FN (BUILT_IN_SIN):
9931 if (validate_arg (arg0, REAL_TYPE))
9932 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9933 break;
9935 CASE_FLT_FN (BUILT_IN_COS):
9936 return fold_builtin_cos (loc, arg0, type, fndecl);
9938 CASE_FLT_FN (BUILT_IN_TAN):
9939 return fold_builtin_tan (arg0, type);
9941 CASE_FLT_FN (BUILT_IN_CEXP):
9942 return fold_builtin_cexp (loc, arg0, type);
9944 CASE_FLT_FN (BUILT_IN_CEXPI):
9945 if (validate_arg (arg0, REAL_TYPE))
9946 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9947 break;
9949 CASE_FLT_FN (BUILT_IN_SINH):
9950 if (validate_arg (arg0, REAL_TYPE))
9951 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9952 break;
9954 CASE_FLT_FN (BUILT_IN_COSH):
9955 return fold_builtin_cosh (loc, arg0, type, fndecl);
9957 CASE_FLT_FN (BUILT_IN_TANH):
9958 if (validate_arg (arg0, REAL_TYPE))
9959 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9960 break;
9962 CASE_FLT_FN (BUILT_IN_ERF):
9963 if (validate_arg (arg0, REAL_TYPE))
9964 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9965 break;
9967 CASE_FLT_FN (BUILT_IN_ERFC):
9968 if (validate_arg (arg0, REAL_TYPE))
9969 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9970 break;
9972 CASE_FLT_FN (BUILT_IN_TGAMMA):
9973 if (validate_arg (arg0, REAL_TYPE))
9974 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9975 break;
9977 CASE_FLT_FN (BUILT_IN_EXP):
9978 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9980 CASE_FLT_FN (BUILT_IN_EXP2):
9981 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9983 CASE_FLT_FN (BUILT_IN_EXP10):
9984 CASE_FLT_FN (BUILT_IN_POW10):
9985 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9987 CASE_FLT_FN (BUILT_IN_EXPM1):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9990 break;
9992 CASE_FLT_FN (BUILT_IN_LOG):
9993 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9995 CASE_FLT_FN (BUILT_IN_LOG2):
9996 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9998 CASE_FLT_FN (BUILT_IN_LOG10):
9999 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10001 CASE_FLT_FN (BUILT_IN_LOG1P):
10002 if (validate_arg (arg0, REAL_TYPE))
10003 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10004 &dconstm1, NULL, false);
10005 break;
10007 CASE_FLT_FN (BUILT_IN_J0):
10008 if (validate_arg (arg0, REAL_TYPE))
10009 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10010 NULL, NULL, 0);
10011 break;
10013 CASE_FLT_FN (BUILT_IN_J1):
10014 if (validate_arg (arg0, REAL_TYPE))
10015 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10016 NULL, NULL, 0);
10017 break;
10019 CASE_FLT_FN (BUILT_IN_Y0):
10020 if (validate_arg (arg0, REAL_TYPE))
10021 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10022 &dconst0, NULL, false);
10023 break;
10025 CASE_FLT_FN (BUILT_IN_Y1):
10026 if (validate_arg (arg0, REAL_TYPE))
10027 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10028 &dconst0, NULL, false);
10029 break;
10031 CASE_FLT_FN (BUILT_IN_NAN):
10032 case BUILT_IN_NAND32:
10033 case BUILT_IN_NAND64:
10034 case BUILT_IN_NAND128:
10035 return fold_builtin_nan (arg0, type, true);
10037 CASE_FLT_FN (BUILT_IN_NANS):
10038 return fold_builtin_nan (arg0, type, false);
10040 CASE_FLT_FN (BUILT_IN_FLOOR):
10041 return fold_builtin_floor (loc, fndecl, arg0);
10043 CASE_FLT_FN (BUILT_IN_CEIL):
10044 return fold_builtin_ceil (loc, fndecl, arg0);
10046 CASE_FLT_FN (BUILT_IN_TRUNC):
10047 return fold_builtin_trunc (loc, fndecl, arg0);
10049 CASE_FLT_FN (BUILT_IN_ROUND):
10050 return fold_builtin_round (loc, fndecl, arg0);
10052 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10053 CASE_FLT_FN (BUILT_IN_RINT):
10054 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10056 CASE_FLT_FN (BUILT_IN_LCEIL):
10057 CASE_FLT_FN (BUILT_IN_LLCEIL):
10058 CASE_FLT_FN (BUILT_IN_LFLOOR):
10059 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10060 CASE_FLT_FN (BUILT_IN_LROUND):
10061 CASE_FLT_FN (BUILT_IN_LLROUND):
10062 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10064 CASE_FLT_FN (BUILT_IN_LRINT):
10065 CASE_FLT_FN (BUILT_IN_LLRINT):
10066 return fold_fixed_mathfn (loc, fndecl, arg0);
10068 case BUILT_IN_BSWAP32:
10069 case BUILT_IN_BSWAP64:
10070 return fold_builtin_bswap (fndecl, arg0);
10072 CASE_INT_FN (BUILT_IN_FFS):
10073 CASE_INT_FN (BUILT_IN_CLZ):
10074 CASE_INT_FN (BUILT_IN_CTZ):
10075 CASE_INT_FN (BUILT_IN_POPCOUNT):
10076 CASE_INT_FN (BUILT_IN_PARITY):
10077 return fold_builtin_bitop (fndecl, arg0);
10079 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10080 return fold_builtin_signbit (loc, arg0, type);
10082 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10083 return fold_builtin_significand (loc, arg0, type);
10085 CASE_FLT_FN (BUILT_IN_ILOGB):
10086 CASE_FLT_FN (BUILT_IN_LOGB):
10087 return fold_builtin_logb (loc, arg0, type);
10089 case BUILT_IN_ISASCII:
10090 return fold_builtin_isascii (loc, arg0);
10092 case BUILT_IN_TOASCII:
10093 return fold_builtin_toascii (loc, arg0);
10095 case BUILT_IN_ISDIGIT:
10096 return fold_builtin_isdigit (loc, arg0);
10098 CASE_FLT_FN (BUILT_IN_FINITE):
10099 case BUILT_IN_FINITED32:
10100 case BUILT_IN_FINITED64:
10101 case BUILT_IN_FINITED128:
10102 case BUILT_IN_ISFINITE:
10104 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10105 if (ret)
10106 return ret;
10107 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10110 CASE_FLT_FN (BUILT_IN_ISINF):
10111 case BUILT_IN_ISINFD32:
10112 case BUILT_IN_ISINFD64:
10113 case BUILT_IN_ISINFD128:
10115 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10116 if (ret)
10117 return ret;
10118 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10121 case BUILT_IN_ISNORMAL:
10122 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10124 case BUILT_IN_ISINF_SIGN:
10125 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10127 CASE_FLT_FN (BUILT_IN_ISNAN):
10128 case BUILT_IN_ISNAND32:
10129 case BUILT_IN_ISNAND64:
10130 case BUILT_IN_ISNAND128:
10131 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10133 case BUILT_IN_PRINTF:
10134 case BUILT_IN_PRINTF_UNLOCKED:
10135 case BUILT_IN_VPRINTF:
10136 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10138 default:
10139 break;
10142 return NULL_TREE;
10146 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10147 IGNORE is true if the result of the function call is ignored. This
10148 function returns NULL_TREE if no simplification was possible. */
10150 static tree
10151 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10153 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10154 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10156 switch (fcode)
10158 CASE_FLT_FN (BUILT_IN_JN):
10159 if (validate_arg (arg0, INTEGER_TYPE)
10160 && validate_arg (arg1, REAL_TYPE))
10161 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10162 break;
10164 CASE_FLT_FN (BUILT_IN_YN):
10165 if (validate_arg (arg0, INTEGER_TYPE)
10166 && validate_arg (arg1, REAL_TYPE))
10167 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10168 &dconst0, false);
10169 break;
10171 CASE_FLT_FN (BUILT_IN_DREM):
10172 CASE_FLT_FN (BUILT_IN_REMAINDER):
10173 if (validate_arg (arg0, REAL_TYPE)
10174 && validate_arg(arg1, REAL_TYPE))
10175 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10176 break;
10178 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10179 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10180 if (validate_arg (arg0, REAL_TYPE)
10181 && validate_arg(arg1, POINTER_TYPE))
10182 return do_mpfr_lgamma_r (arg0, arg1, type);
10183 break;
10185 CASE_FLT_FN (BUILT_IN_ATAN2):
10186 if (validate_arg (arg0, REAL_TYPE)
10187 && validate_arg(arg1, REAL_TYPE))
10188 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10189 break;
10191 CASE_FLT_FN (BUILT_IN_FDIM):
10192 if (validate_arg (arg0, REAL_TYPE)
10193 && validate_arg(arg1, REAL_TYPE))
10194 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10195 break;
10197 CASE_FLT_FN (BUILT_IN_HYPOT):
10198 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10200 CASE_FLT_FN (BUILT_IN_CPOW):
10201 if (validate_arg (arg0, COMPLEX_TYPE)
10202 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10203 && validate_arg (arg1, COMPLEX_TYPE)
10204 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10205 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10206 break;
10208 CASE_FLT_FN (BUILT_IN_LDEXP):
10209 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10210 CASE_FLT_FN (BUILT_IN_SCALBN):
10211 CASE_FLT_FN (BUILT_IN_SCALBLN):
10212 return fold_builtin_load_exponent (loc, arg0, arg1,
10213 type, /*ldexp=*/false);
10215 CASE_FLT_FN (BUILT_IN_FREXP):
10216 return fold_builtin_frexp (loc, arg0, arg1, type);
10218 CASE_FLT_FN (BUILT_IN_MODF):
10219 return fold_builtin_modf (loc, arg0, arg1, type);
10221 case BUILT_IN_BZERO:
10222 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10224 case BUILT_IN_FPUTS:
10225 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10227 case BUILT_IN_FPUTS_UNLOCKED:
10228 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10230 case BUILT_IN_STRSTR:
10231 return fold_builtin_strstr (loc, arg0, arg1, type);
10233 case BUILT_IN_STRCAT:
10234 return fold_builtin_strcat (loc, arg0, arg1);
10236 case BUILT_IN_STRSPN:
10237 return fold_builtin_strspn (loc, arg0, arg1);
10239 case BUILT_IN_STRCSPN:
10240 return fold_builtin_strcspn (loc, arg0, arg1);
10242 case BUILT_IN_STRCHR:
10243 case BUILT_IN_INDEX:
10244 return fold_builtin_strchr (loc, arg0, arg1, type);
10246 case BUILT_IN_STRRCHR:
10247 case BUILT_IN_RINDEX:
10248 return fold_builtin_strrchr (loc, arg0, arg1, type);
10250 case BUILT_IN_STRCPY:
10251 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10253 case BUILT_IN_STPCPY:
10254 if (ignore)
10256 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10257 if (!fn)
10258 break;
10260 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10262 else
10263 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10264 break;
10266 case BUILT_IN_STRCMP:
10267 return fold_builtin_strcmp (loc, arg0, arg1);
10269 case BUILT_IN_STRPBRK:
10270 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10272 case BUILT_IN_EXPECT:
10273 return fold_builtin_expect (loc, arg0, arg1);
10275 CASE_FLT_FN (BUILT_IN_POW):
10276 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10278 CASE_FLT_FN (BUILT_IN_POWI):
10279 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10281 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10282 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10284 CASE_FLT_FN (BUILT_IN_FMIN):
10285 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10287 CASE_FLT_FN (BUILT_IN_FMAX):
10288 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10290 case BUILT_IN_ISGREATER:
10291 return fold_builtin_unordered_cmp (loc, fndecl,
10292 arg0, arg1, UNLE_EXPR, LE_EXPR);
10293 case BUILT_IN_ISGREATEREQUAL:
10294 return fold_builtin_unordered_cmp (loc, fndecl,
10295 arg0, arg1, UNLT_EXPR, LT_EXPR);
10296 case BUILT_IN_ISLESS:
10297 return fold_builtin_unordered_cmp (loc, fndecl,
10298 arg0, arg1, UNGE_EXPR, GE_EXPR);
10299 case BUILT_IN_ISLESSEQUAL:
10300 return fold_builtin_unordered_cmp (loc, fndecl,
10301 arg0, arg1, UNGT_EXPR, GT_EXPR);
10302 case BUILT_IN_ISLESSGREATER:
10303 return fold_builtin_unordered_cmp (loc, fndecl,
10304 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10305 case BUILT_IN_ISUNORDERED:
10306 return fold_builtin_unordered_cmp (loc, fndecl,
10307 arg0, arg1, UNORDERED_EXPR,
10308 NOP_EXPR);
10310 /* We do the folding for va_start in the expander. */
10311 case BUILT_IN_VA_START:
10312 break;
10314 case BUILT_IN_SPRINTF:
10315 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10317 case BUILT_IN_OBJECT_SIZE:
10318 return fold_builtin_object_size (arg0, arg1);
10320 case BUILT_IN_PRINTF:
10321 case BUILT_IN_PRINTF_UNLOCKED:
10322 case BUILT_IN_VPRINTF:
10323 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10325 case BUILT_IN_PRINTF_CHK:
10326 case BUILT_IN_VPRINTF_CHK:
10327 if (!validate_arg (arg0, INTEGER_TYPE)
10328 || TREE_SIDE_EFFECTS (arg0))
10329 return NULL_TREE;
10330 else
10331 return fold_builtin_printf (loc, fndecl,
10332 arg1, NULL_TREE, ignore, fcode);
10333 break;
10335 case BUILT_IN_FPRINTF:
10336 case BUILT_IN_FPRINTF_UNLOCKED:
10337 case BUILT_IN_VFPRINTF:
10338 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10339 ignore, fcode);
10341 default:
10342 break;
10344 return NULL_TREE;
10347 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10348 and ARG2. IGNORE is true if the result of the function call is ignored.
10349 This function returns NULL_TREE if no simplification was possible. */
10351 static tree
10352 fold_builtin_3 (location_t loc, tree fndecl,
10353 tree arg0, tree arg1, tree arg2, bool ignore)
10355 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10356 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10357 switch (fcode)
10360 CASE_FLT_FN (BUILT_IN_SINCOS):
10361 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10363 CASE_FLT_FN (BUILT_IN_FMA):
10364 if (validate_arg (arg0, REAL_TYPE)
10365 && validate_arg(arg1, REAL_TYPE)
10366 && validate_arg(arg2, REAL_TYPE))
10367 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10368 break;
10370 CASE_FLT_FN (BUILT_IN_REMQUO):
10371 if (validate_arg (arg0, REAL_TYPE)
10372 && validate_arg(arg1, REAL_TYPE)
10373 && validate_arg(arg2, POINTER_TYPE))
10374 return do_mpfr_remquo (arg0, arg1, arg2);
10375 break;
10377 case BUILT_IN_MEMSET:
10378 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10380 case BUILT_IN_BCOPY:
10381 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10382 void_type_node, true, /*endp=*/3);
10384 case BUILT_IN_MEMCPY:
10385 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10386 type, ignore, /*endp=*/0);
10388 case BUILT_IN_MEMPCPY:
10389 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10390 type, ignore, /*endp=*/1);
10392 case BUILT_IN_MEMMOVE:
10393 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10394 type, ignore, /*endp=*/3);
10396 case BUILT_IN_STRNCAT:
10397 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10399 case BUILT_IN_STRNCPY:
10400 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10402 case BUILT_IN_STRNCMP:
10403 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10405 case BUILT_IN_MEMCHR:
10406 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10408 case BUILT_IN_BCMP:
10409 case BUILT_IN_MEMCMP:
10410 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10412 case BUILT_IN_SPRINTF:
10413 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10415 case BUILT_IN_STRCPY_CHK:
10416 case BUILT_IN_STPCPY_CHK:
10417 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10418 ignore, fcode);
10420 case BUILT_IN_STRCAT_CHK:
10421 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10423 case BUILT_IN_PRINTF_CHK:
10424 case BUILT_IN_VPRINTF_CHK:
10425 if (!validate_arg (arg0, INTEGER_TYPE)
10426 || TREE_SIDE_EFFECTS (arg0))
10427 return NULL_TREE;
10428 else
10429 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10430 break;
10432 case BUILT_IN_FPRINTF:
10433 case BUILT_IN_FPRINTF_UNLOCKED:
10434 case BUILT_IN_VFPRINTF:
10435 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10436 ignore, fcode);
10438 case BUILT_IN_FPRINTF_CHK:
10439 case BUILT_IN_VFPRINTF_CHK:
10440 if (!validate_arg (arg1, INTEGER_TYPE)
10441 || TREE_SIDE_EFFECTS (arg1))
10442 return NULL_TREE;
10443 else
10444 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10445 ignore, fcode);
10447 default:
10448 break;
10450 return NULL_TREE;
10453 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10454 ARG2, and ARG3. IGNORE is true if the result of the function call is
10455 ignored. This function returns NULL_TREE if no simplification was
10456 possible. */
10458 static tree
10459 fold_builtin_4 (location_t loc, tree fndecl,
10460 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10462 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10464 switch (fcode)
10466 case BUILT_IN_MEMCPY_CHK:
10467 case BUILT_IN_MEMPCPY_CHK:
10468 case BUILT_IN_MEMMOVE_CHK:
10469 case BUILT_IN_MEMSET_CHK:
10470 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10471 NULL_TREE, ignore,
10472 DECL_FUNCTION_CODE (fndecl));
10474 case BUILT_IN_STRNCPY_CHK:
10475 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10477 case BUILT_IN_STRNCAT_CHK:
10478 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10480 case BUILT_IN_FPRINTF_CHK:
10481 case BUILT_IN_VFPRINTF_CHK:
10482 if (!validate_arg (arg1, INTEGER_TYPE)
10483 || TREE_SIDE_EFFECTS (arg1))
10484 return NULL_TREE;
10485 else
10486 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10487 ignore, fcode);
10488 break;
10490 default:
10491 break;
10493 return NULL_TREE;
10496 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10497 arguments, where NARGS <= 4. IGNORE is true if the result of the
10498 function call is ignored. This function returns NULL_TREE if no
10499 simplification was possible. Note that this only folds builtins with
10500 fixed argument patterns. Foldings that do varargs-to-varargs
10501 transformations, or that match calls with more than 4 arguments,
10502 need to be handled with fold_builtin_varargs instead. */
10504 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10506 static tree
10507 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10509 tree ret = NULL_TREE;
10511 switch (nargs)
10513 case 0:
10514 ret = fold_builtin_0 (loc, fndecl, ignore);
10515 break;
10516 case 1:
10517 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10518 break;
10519 case 2:
10520 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10521 break;
10522 case 3:
10523 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10524 break;
10525 case 4:
10526 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10527 ignore);
10528 break;
10529 default:
10530 break;
10532 if (ret)
10534 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10535 SET_EXPR_LOCATION (ret, loc);
10536 TREE_NO_WARNING (ret) = 1;
10537 return ret;
10539 return NULL_TREE;
10542 /* Builtins with folding operations that operate on "..." arguments
10543 need special handling; we need to store the arguments in a convenient
10544 data structure before attempting any folding. Fortunately there are
10545 only a few builtins that fall into this category. FNDECL is the
10546 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10547 result of the function call is ignored. */
10549 static tree
10550 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10551 bool ignore ATTRIBUTE_UNUSED)
10553 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10554 tree ret = NULL_TREE;
10556 switch (fcode)
10558 case BUILT_IN_SPRINTF_CHK:
10559 case BUILT_IN_VSPRINTF_CHK:
10560 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10561 break;
10563 case BUILT_IN_SNPRINTF_CHK:
10564 case BUILT_IN_VSNPRINTF_CHK:
10565 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10566 break;
10568 case BUILT_IN_FPCLASSIFY:
10569 ret = fold_builtin_fpclassify (loc, exp);
10570 break;
10572 default:
10573 break;
10575 if (ret)
10577 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10578 SET_EXPR_LOCATION (ret, loc);
10579 TREE_NO_WARNING (ret) = 1;
10580 return ret;
10582 return NULL_TREE;
10585 /* Return true if FNDECL shouldn't be folded right now.
10586 If a built-in function has an inline attribute always_inline
10587 wrapper, defer folding it after always_inline functions have
10588 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10589 might not be performed. */
10591 static bool
10592 avoid_folding_inline_builtin (tree fndecl)
10594 return (DECL_DECLARED_INLINE_P (fndecl)
10595 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10596 && cfun
10597 && !cfun->always_inline_functions_inlined
10598 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10601 /* A wrapper function for builtin folding that prevents warnings for
10602 "statement without effect" and the like, caused by removing the
10603 call node earlier than the warning is generated. */
10605 tree
10606 fold_call_expr (location_t loc, tree exp, bool ignore)
10608 tree ret = NULL_TREE;
10609 tree fndecl = get_callee_fndecl (exp);
10610 if (fndecl
10611 && TREE_CODE (fndecl) == FUNCTION_DECL
10612 && DECL_BUILT_IN (fndecl)
10613 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10614 yet. Defer folding until we see all the arguments
10615 (after inlining). */
10616 && !CALL_EXPR_VA_ARG_PACK (exp))
10618 int nargs = call_expr_nargs (exp);
10620 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10621 instead last argument is __builtin_va_arg_pack (). Defer folding
10622 even in that case, until arguments are finalized. */
10623 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10625 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10626 if (fndecl2
10627 && TREE_CODE (fndecl2) == FUNCTION_DECL
10628 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10629 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10630 return NULL_TREE;
10633 if (avoid_folding_inline_builtin (fndecl))
10634 return NULL_TREE;
10636 /* FIXME: Don't use a list in this interface. */
10637 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10638 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10639 else
10641 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10643 tree *args = CALL_EXPR_ARGP (exp);
10644 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10646 if (!ret)
10647 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10648 if (ret)
10649 return ret;
10652 return NULL_TREE;
10655 /* Conveniently construct a function call expression. FNDECL names the
10656 function to be called and ARGLIST is a TREE_LIST of arguments. */
10658 tree
10659 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10661 tree fntype = TREE_TYPE (fndecl);
10662 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10663 int n = list_length (arglist);
10664 tree *argarray = (tree *) alloca (n * sizeof (tree));
10665 int i;
10667 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10668 argarray[i] = TREE_VALUE (arglist);
10669 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10672 /* Conveniently construct a function call expression. FNDECL names the
10673 function to be called, N is the number of arguments, and the "..."
10674 parameters are the argument expressions. */
10676 tree
10677 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10679 va_list ap;
10680 tree fntype = TREE_TYPE (fndecl);
10681 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10682 tree *argarray = (tree *) alloca (n * sizeof (tree));
10683 int i;
10685 va_start (ap, n);
10686 for (i = 0; i < n; i++)
10687 argarray[i] = va_arg (ap, tree);
10688 va_end (ap);
10689 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10692 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10693 N arguments are passed in the array ARGARRAY. */
10695 tree
10696 fold_builtin_call_array (location_t loc, tree type,
10697 tree fn,
10698 int n,
10699 tree *argarray)
10701 tree ret = NULL_TREE;
10702 int i;
10703 tree exp;
10705 if (TREE_CODE (fn) == ADDR_EXPR)
10707 tree fndecl = TREE_OPERAND (fn, 0);
10708 if (TREE_CODE (fndecl) == FUNCTION_DECL
10709 && DECL_BUILT_IN (fndecl))
10711 /* If last argument is __builtin_va_arg_pack (), arguments to this
10712 function are not finalized yet. Defer folding until they are. */
10713 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10715 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10716 if (fndecl2
10717 && TREE_CODE (fndecl2) == FUNCTION_DECL
10718 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10719 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10720 return build_call_array_loc (loc, type, fn, n, argarray);
10722 if (avoid_folding_inline_builtin (fndecl))
10723 return build_call_array_loc (loc, type, fn, n, argarray);
10724 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10726 tree arglist = NULL_TREE;
10727 for (i = n - 1; i >= 0; i--)
10728 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10729 ret = targetm.fold_builtin (fndecl, arglist, false);
10730 if (ret)
10731 return ret;
10732 return build_call_array_loc (loc, type, fn, n, argarray);
10734 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10736 /* First try the transformations that don't require consing up
10737 an exp. */
10738 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10739 if (ret)
10740 return ret;
10743 /* If we got this far, we need to build an exp. */
10744 exp = build_call_array_loc (loc, type, fn, n, argarray);
10745 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10746 return ret ? ret : exp;
10750 return build_call_array_loc (loc, type, fn, n, argarray);
10753 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10754 along with N new arguments specified as the "..." parameters. SKIP
10755 is the number of arguments in EXP to be omitted. This function is used
10756 to do varargs-to-varargs transformations. */
10758 static tree
10759 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10761 int oldnargs = call_expr_nargs (exp);
10762 int nargs = oldnargs - skip + n;
10763 tree fntype = TREE_TYPE (fndecl);
10764 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10765 tree *buffer;
10767 if (n > 0)
10769 int i, j;
10770 va_list ap;
10772 buffer = XALLOCAVEC (tree, nargs);
10773 va_start (ap, n);
10774 for (i = 0; i < n; i++)
10775 buffer[i] = va_arg (ap, tree);
10776 va_end (ap);
10777 for (j = skip; j < oldnargs; j++, i++)
10778 buffer[i] = CALL_EXPR_ARG (exp, j);
10780 else
10781 buffer = CALL_EXPR_ARGP (exp) + skip;
10783 return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
10786 /* Validate a single argument ARG against a tree code CODE representing
10787 a type. */
10789 static bool
10790 validate_arg (const_tree arg, enum tree_code code)
10792 if (!arg)
10793 return false;
10794 else if (code == POINTER_TYPE)
10795 return POINTER_TYPE_P (TREE_TYPE (arg));
10796 else if (code == INTEGER_TYPE)
10797 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10798 return code == TREE_CODE (TREE_TYPE (arg));
10801 /* This function validates the types of a function call argument list
10802 against a specified list of tree_codes. If the last specifier is a 0,
10803 that represents an ellipses, otherwise the last specifier must be a
10804 VOID_TYPE.
10806 This is the GIMPLE version of validate_arglist. Eventually we want to
10807 completely convert builtins.c to work from GIMPLEs and the tree based
10808 validate_arglist will then be removed. */
10810 bool
10811 validate_gimple_arglist (const_gimple call, ...)
10813 enum tree_code code;
10814 bool res = 0;
10815 va_list ap;
10816 const_tree arg;
10817 size_t i;
10819 va_start (ap, call);
10820 i = 0;
10824 code = (enum tree_code) va_arg (ap, int);
10825 switch (code)
10827 case 0:
10828 /* This signifies an ellipses, any further arguments are all ok. */
10829 res = true;
10830 goto end;
10831 case VOID_TYPE:
10832 /* This signifies an endlink, if no arguments remain, return
10833 true, otherwise return false. */
10834 res = (i == gimple_call_num_args (call));
10835 goto end;
10836 default:
10837 /* If no parameters remain or the parameter's code does not
10838 match the specified code, return false. Otherwise continue
10839 checking any remaining arguments. */
10840 arg = gimple_call_arg (call, i++);
10841 if (!validate_arg (arg, code))
10842 goto end;
10843 break;
10846 while (1);
10848 /* We need gotos here since we can only have one VA_CLOSE in a
10849 function. */
10850 end: ;
10851 va_end (ap);
10853 return res;
10856 /* This function validates the types of a function call argument list
10857 against a specified list of tree_codes. If the last specifier is a 0,
10858 that represents an ellipses, otherwise the last specifier must be a
10859 VOID_TYPE. */
10861 bool
10862 validate_arglist (const_tree callexpr, ...)
10864 enum tree_code code;
10865 bool res = 0;
10866 va_list ap;
10867 const_call_expr_arg_iterator iter;
10868 const_tree arg;
10870 va_start (ap, callexpr);
10871 init_const_call_expr_arg_iterator (callexpr, &iter);
10875 code = (enum tree_code) va_arg (ap, int);
10876 switch (code)
10878 case 0:
10879 /* This signifies an ellipses, any further arguments are all ok. */
10880 res = true;
10881 goto end;
10882 case VOID_TYPE:
10883 /* This signifies an endlink, if no arguments remain, return
10884 true, otherwise return false. */
10885 res = !more_const_call_expr_args_p (&iter);
10886 goto end;
10887 default:
10888 /* If no parameters remain or the parameter's code does not
10889 match the specified code, return false. Otherwise continue
10890 checking any remaining arguments. */
10891 arg = next_const_call_expr_arg (&iter);
10892 if (!validate_arg (arg, code))
10893 goto end;
10894 break;
10897 while (1);
10899 /* We need gotos here since we can only have one VA_CLOSE in a
10900 function. */
10901 end: ;
10902 va_end (ap);
10904 return res;
10907 /* Default target-specific builtin expander that does nothing. */
10910 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10911 rtx target ATTRIBUTE_UNUSED,
10912 rtx subtarget ATTRIBUTE_UNUSED,
10913 enum machine_mode mode ATTRIBUTE_UNUSED,
10914 int ignore ATTRIBUTE_UNUSED)
10916 return NULL_RTX;
10919 /* Returns true is EXP represents data that would potentially reside
10920 in a readonly section. */
10922 static bool
10923 readonly_data_expr (tree exp)
10925 STRIP_NOPS (exp);
10927 if (TREE_CODE (exp) != ADDR_EXPR)
10928 return false;
10930 exp = get_base_address (TREE_OPERAND (exp, 0));
10931 if (!exp)
10932 return false;
10934 /* Make sure we call decl_readonly_section only for trees it
10935 can handle (since it returns true for everything it doesn't
10936 understand). */
10937 if (TREE_CODE (exp) == STRING_CST
10938 || TREE_CODE (exp) == CONSTRUCTOR
10939 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10940 return decl_readonly_section (exp, 0);
10941 else
10942 return false;
10945 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10946 to the call, and TYPE is its return type.
10948 Return NULL_TREE if no simplification was possible, otherwise return the
10949 simplified form of the call as a tree.
10951 The simplified form may be a constant or other expression which
10952 computes the same value, but in a more efficient manner (including
10953 calls to other builtin functions).
10955 The call may contain arguments which need to be evaluated, but
10956 which are not useful to determine the result of the call. In
10957 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10958 COMPOUND_EXPR will be an argument which must be evaluated.
10959 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10960 COMPOUND_EXPR in the chain will contain the tree for the simplified
10961 form of the builtin function call. */
10963 static tree
10964 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10966 if (!validate_arg (s1, POINTER_TYPE)
10967 || !validate_arg (s2, POINTER_TYPE))
10968 return NULL_TREE;
10969 else
10971 tree fn;
10972 const char *p1, *p2;
10974 p2 = c_getstr (s2);
10975 if (p2 == NULL)
10976 return NULL_TREE;
10978 p1 = c_getstr (s1);
10979 if (p1 != NULL)
10981 const char *r = strstr (p1, p2);
10982 tree tem;
10984 if (r == NULL)
10985 return build_int_cst (TREE_TYPE (s1), 0);
10987 /* Return an offset into the constant string argument. */
10988 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10989 s1, size_int (r - p1));
10990 return fold_convert_loc (loc, type, tem);
10993 /* The argument is const char *, and the result is char *, so we need
10994 a type conversion here to avoid a warning. */
10995 if (p2[0] == '\0')
10996 return fold_convert_loc (loc, type, s1);
10998 if (p2[1] != '\0')
10999 return NULL_TREE;
11001 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11002 if (!fn)
11003 return NULL_TREE;
11005 /* New argument list transforming strstr(s1, s2) to
11006 strchr(s1, s2[0]). */
11007 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11011 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11012 the call, and TYPE is its return type.
11014 Return NULL_TREE if no simplification was possible, otherwise return the
11015 simplified form of the call as a tree.
11017 The simplified form may be a constant or other expression which
11018 computes the same value, but in a more efficient manner (including
11019 calls to other builtin functions).
11021 The call may contain arguments which need to be evaluated, but
11022 which are not useful to determine the result of the call. In
11023 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11024 COMPOUND_EXPR will be an argument which must be evaluated.
11025 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11026 COMPOUND_EXPR in the chain will contain the tree for the simplified
11027 form of the builtin function call. */
11029 static tree
11030 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11032 if (!validate_arg (s1, POINTER_TYPE)
11033 || !validate_arg (s2, INTEGER_TYPE))
11034 return NULL_TREE;
11035 else
11037 const char *p1;
11039 if (TREE_CODE (s2) != INTEGER_CST)
11040 return NULL_TREE;
11042 p1 = c_getstr (s1);
11043 if (p1 != NULL)
11045 char c;
11046 const char *r;
11047 tree tem;
11049 if (target_char_cast (s2, &c))
11050 return NULL_TREE;
11052 r = strchr (p1, c);
11054 if (r == NULL)
11055 return build_int_cst (TREE_TYPE (s1), 0);
11057 /* Return an offset into the constant string argument. */
11058 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11059 s1, size_int (r - p1));
11060 return fold_convert_loc (loc, type, tem);
11062 return NULL_TREE;
11066 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11067 the call, and TYPE is its return type.
11069 Return NULL_TREE if no simplification was possible, otherwise return the
11070 simplified form of the call as a tree.
11072 The simplified form may be a constant or other expression which
11073 computes the same value, but in a more efficient manner (including
11074 calls to other builtin functions).
11076 The call may contain arguments which need to be evaluated, but
11077 which are not useful to determine the result of the call. In
11078 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11079 COMPOUND_EXPR will be an argument which must be evaluated.
11080 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11081 COMPOUND_EXPR in the chain will contain the tree for the simplified
11082 form of the builtin function call. */
11084 static tree
11085 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11087 if (!validate_arg (s1, POINTER_TYPE)
11088 || !validate_arg (s2, INTEGER_TYPE))
11089 return NULL_TREE;
11090 else
11092 tree fn;
11093 const char *p1;
11095 if (TREE_CODE (s2) != INTEGER_CST)
11096 return NULL_TREE;
11098 p1 = c_getstr (s1);
11099 if (p1 != NULL)
11101 char c;
11102 const char *r;
11103 tree tem;
11105 if (target_char_cast (s2, &c))
11106 return NULL_TREE;
11108 r = strrchr (p1, c);
11110 if (r == NULL)
11111 return build_int_cst (TREE_TYPE (s1), 0);
11113 /* Return an offset into the constant string argument. */
11114 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11115 s1, size_int (r - p1));
11116 return fold_convert_loc (loc, type, tem);
11119 if (! integer_zerop (s2))
11120 return NULL_TREE;
11122 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11123 if (!fn)
11124 return NULL_TREE;
11126 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11127 return build_call_expr_loc (loc, fn, 2, s1, s2);
11131 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11132 to the call, and TYPE is its return type.
11134 Return NULL_TREE if no simplification was possible, otherwise return the
11135 simplified form of the call as a tree.
11137 The simplified form may be a constant or other expression which
11138 computes the same value, but in a more efficient manner (including
11139 calls to other builtin functions).
11141 The call may contain arguments which need to be evaluated, but
11142 which are not useful to determine the result of the call. In
11143 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11144 COMPOUND_EXPR will be an argument which must be evaluated.
11145 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11146 COMPOUND_EXPR in the chain will contain the tree for the simplified
11147 form of the builtin function call. */
11149 static tree
11150 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11152 if (!validate_arg (s1, POINTER_TYPE)
11153 || !validate_arg (s2, POINTER_TYPE))
11154 return NULL_TREE;
11155 else
11157 tree fn;
11158 const char *p1, *p2;
11160 p2 = c_getstr (s2);
11161 if (p2 == NULL)
11162 return NULL_TREE;
11164 p1 = c_getstr (s1);
11165 if (p1 != NULL)
11167 const char *r = strpbrk (p1, p2);
11168 tree tem;
11170 if (r == NULL)
11171 return build_int_cst (TREE_TYPE (s1), 0);
11173 /* Return an offset into the constant string argument. */
11174 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11175 s1, size_int (r - p1));
11176 return fold_convert_loc (loc, type, tem);
11179 if (p2[0] == '\0')
11180 /* strpbrk(x, "") == NULL.
11181 Evaluate and ignore s1 in case it had side-effects. */
11182 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11184 if (p2[1] != '\0')
11185 return NULL_TREE; /* Really call strpbrk. */
11187 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11188 if (!fn)
11189 return NULL_TREE;
11191 /* New argument list transforming strpbrk(s1, s2) to
11192 strchr(s1, s2[0]). */
11193 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11197 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11198 to the call.
11200 Return NULL_TREE if no simplification was possible, otherwise return the
11201 simplified form of the call as a tree.
11203 The simplified form may be a constant or other expression which
11204 computes the same value, but in a more efficient manner (including
11205 calls to other builtin functions).
11207 The call may contain arguments which need to be evaluated, but
11208 which are not useful to determine the result of the call. In
11209 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11210 COMPOUND_EXPR will be an argument which must be evaluated.
11211 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11212 COMPOUND_EXPR in the chain will contain the tree for the simplified
11213 form of the builtin function call. */
11215 static tree
11216 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11218 if (!validate_arg (dst, POINTER_TYPE)
11219 || !validate_arg (src, POINTER_TYPE))
11220 return NULL_TREE;
11221 else
11223 const char *p = c_getstr (src);
11225 /* If the string length is zero, return the dst parameter. */
11226 if (p && *p == '\0')
11227 return dst;
11229 if (optimize_insn_for_speed_p ())
11231 /* See if we can store by pieces into (dst + strlen(dst)). */
11232 tree newdst, call;
11233 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11234 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11236 if (!strlen_fn || !strcpy_fn)
11237 return NULL_TREE;
11239 /* If we don't have a movstr we don't want to emit an strcpy
11240 call. We have to do that if the length of the source string
11241 isn't computable (in that case we can use memcpy probably
11242 later expanding to a sequence of mov instructions). If we
11243 have movstr instructions we can emit strcpy calls. */
11244 if (!HAVE_movstr)
11246 tree len = c_strlen (src, 1);
11247 if (! len || TREE_SIDE_EFFECTS (len))
11248 return NULL_TREE;
11251 /* Stabilize the argument list. */
11252 dst = builtin_save_expr (dst);
11254 /* Create strlen (dst). */
11255 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11256 /* Create (dst p+ strlen (dst)). */
11258 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11259 TREE_TYPE (dst), dst, newdst);
11260 newdst = builtin_save_expr (newdst);
11262 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11263 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11265 return NULL_TREE;
11269 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11270 arguments to the call.
11272 Return NULL_TREE if no simplification was possible, otherwise return the
11273 simplified form of the call as a tree.
11275 The simplified form may be a constant or other expression which
11276 computes the same value, but in a more efficient manner (including
11277 calls to other builtin functions).
11279 The call may contain arguments which need to be evaluated, but
11280 which are not useful to determine the result of the call. In
11281 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11282 COMPOUND_EXPR will be an argument which must be evaluated.
11283 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11284 COMPOUND_EXPR in the chain will contain the tree for the simplified
11285 form of the builtin function call. */
11287 static tree
11288 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11290 if (!validate_arg (dst, POINTER_TYPE)
11291 || !validate_arg (src, POINTER_TYPE)
11292 || !validate_arg (len, INTEGER_TYPE))
11293 return NULL_TREE;
11294 else
11296 const char *p = c_getstr (src);
11298 /* If the requested length is zero, or the src parameter string
11299 length is zero, return the dst parameter. */
11300 if (integer_zerop (len) || (p && *p == '\0'))
11301 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11303 /* If the requested len is greater than or equal to the string
11304 length, call strcat. */
11305 if (TREE_CODE (len) == INTEGER_CST && p
11306 && compare_tree_int (len, strlen (p)) >= 0)
11308 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11310 /* If the replacement _DECL isn't initialized, don't do the
11311 transformation. */
11312 if (!fn)
11313 return NULL_TREE;
11315 return build_call_expr_loc (loc, fn, 2, dst, src);
11317 return NULL_TREE;
11321 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11322 to the call.
11324 Return NULL_TREE if no simplification was possible, otherwise return the
11325 simplified form of the call as a tree.
11327 The simplified form may be a constant or other expression which
11328 computes the same value, but in a more efficient manner (including
11329 calls to other builtin functions).
11331 The call may contain arguments which need to be evaluated, but
11332 which are not useful to determine the result of the call. In
11333 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11334 COMPOUND_EXPR will be an argument which must be evaluated.
11335 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11336 COMPOUND_EXPR in the chain will contain the tree for the simplified
11337 form of the builtin function call. */
11339 static tree
11340 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11342 if (!validate_arg (s1, POINTER_TYPE)
11343 || !validate_arg (s2, POINTER_TYPE))
11344 return NULL_TREE;
11345 else
11347 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11349 /* If both arguments are constants, evaluate at compile-time. */
11350 if (p1 && p2)
11352 const size_t r = strspn (p1, p2);
11353 return size_int (r);
11356 /* If either argument is "", return NULL_TREE. */
11357 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11358 /* Evaluate and ignore both arguments in case either one has
11359 side-effects. */
11360 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11361 s1, s2);
11362 return NULL_TREE;
11366 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11367 to the call.
11369 Return NULL_TREE if no simplification was possible, otherwise return the
11370 simplified form of the call as a tree.
11372 The simplified form may be a constant or other expression which
11373 computes the same value, but in a more efficient manner (including
11374 calls to other builtin functions).
11376 The call may contain arguments which need to be evaluated, but
11377 which are not useful to determine the result of the call. In
11378 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11379 COMPOUND_EXPR will be an argument which must be evaluated.
11380 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11381 COMPOUND_EXPR in the chain will contain the tree for the simplified
11382 form of the builtin function call. */
11384 static tree
11385 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11387 if (!validate_arg (s1, POINTER_TYPE)
11388 || !validate_arg (s2, POINTER_TYPE))
11389 return NULL_TREE;
11390 else
11392 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11394 /* If both arguments are constants, evaluate at compile-time. */
11395 if (p1 && p2)
11397 const size_t r = strcspn (p1, p2);
11398 return size_int (r);
11401 /* If the first argument is "", return NULL_TREE. */
11402 if (p1 && *p1 == '\0')
11404 /* Evaluate and ignore argument s2 in case it has
11405 side-effects. */
11406 return omit_one_operand_loc (loc, size_type_node,
11407 size_zero_node, s2);
11410 /* If the second argument is "", return __builtin_strlen(s1). */
11411 if (p2 && *p2 == '\0')
11413 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11415 /* If the replacement _DECL isn't initialized, don't do the
11416 transformation. */
11417 if (!fn)
11418 return NULL_TREE;
11420 return build_call_expr_loc (loc, fn, 1, s1);
11422 return NULL_TREE;
11426 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11427 to the call. IGNORE is true if the value returned
11428 by the builtin will be ignored. UNLOCKED is true is true if this
11429 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11430 the known length of the string. Return NULL_TREE if no simplification
11431 was possible. */
11433 tree
11434 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11435 bool ignore, bool unlocked, tree len)
11437 /* If we're using an unlocked function, assume the other unlocked
11438 functions exist explicitly. */
11439 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11440 : implicit_built_in_decls[BUILT_IN_FPUTC];
11441 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11442 : implicit_built_in_decls[BUILT_IN_FWRITE];
11444 /* If the return value is used, don't do the transformation. */
11445 if (!ignore)
11446 return NULL_TREE;
11448 /* Verify the arguments in the original call. */
11449 if (!validate_arg (arg0, POINTER_TYPE)
11450 || !validate_arg (arg1, POINTER_TYPE))
11451 return NULL_TREE;
11453 if (! len)
11454 len = c_strlen (arg0, 0);
11456 /* Get the length of the string passed to fputs. If the length
11457 can't be determined, punt. */
11458 if (!len
11459 || TREE_CODE (len) != INTEGER_CST)
11460 return NULL_TREE;
11462 switch (compare_tree_int (len, 1))
11464 case -1: /* length is 0, delete the call entirely . */
11465 return omit_one_operand_loc (loc, integer_type_node,
11466 integer_zero_node, arg1);;
11468 case 0: /* length is 1, call fputc. */
11470 const char *p = c_getstr (arg0);
11472 if (p != NULL)
11474 if (fn_fputc)
11475 return build_call_expr_loc (loc, fn_fputc, 2,
11476 build_int_cst (NULL_TREE, p[0]), arg1);
11477 else
11478 return NULL_TREE;
11481 /* FALLTHROUGH */
11482 case 1: /* length is greater than 1, call fwrite. */
11484 /* If optimizing for size keep fputs. */
11485 if (optimize_function_for_size_p (cfun))
11486 return NULL_TREE;
11487 /* New argument list transforming fputs(string, stream) to
11488 fwrite(string, 1, len, stream). */
11489 if (fn_fwrite)
11490 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11491 size_one_node, len, arg1);
11492 else
11493 return NULL_TREE;
11495 default:
11496 gcc_unreachable ();
11498 return NULL_TREE;
/* NOTE(review): this chunk is a line-numbered dump whose brace and blank
   lines were lost in extraction; only comments are added below and the
   surviving text is kept byte-for-byte.  The function validates the
   arguments of a va_start/__builtin_next_arg call, emits any diagnostics,
   and then destructively rewrites the checked argument to 0 so the
   diagnostic is only issued once.  */
11501 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11502 produced. False otherwise. This is done so that we don't output the error
11503 or warning twice or three times. */
11505 bool
11506 fold_builtin_next_arg (tree exp, bool va_start_p)
11508 tree fntype = TREE_TYPE (current_function_decl);
11509 int nargs = call_expr_nargs (exp);
11510 tree arg;
/* va_start only makes sense in a function with a variable argument
   list; a (void)-terminated or empty TYPE_ARG_TYPES means fixed args.  */
11512 if (TYPE_ARG_TYPES (fntype) == 0
11513 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11514 == void_type_node))
11516 error ("%<va_start%> used in function with fixed args");
11517 return true;
11520 if (va_start_p)
/* NOTE(review): we are already inside the va_start_p branch here, so the
   "va_start_p &&" in the following condition is redundant.  */
11522 if (va_start_p && (nargs != 2))
11524 error ("wrong number of arguments to function %<va_start%>");
11525 return true;
11527 arg = CALL_EXPR_ARG (exp, 1);
11529 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11530 when we checked the arguments and if needed issued a warning. */
11531 else
11533 if (nargs == 0)
11535 /* Evidently an out of date version of <stdarg.h>; can't validate
11536 va_start's second argument, but can still work as intended. */
11537 warning (0, "%<__builtin_next_arg%> called without an argument");
11538 return true;
11540 else if (nargs > 1)
11542 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11543 return true;
11545 arg = CALL_EXPR_ARG (exp, 0);
/* Look through an SSA name to the underlying declared variable before
   comparing against the last parameter declaration.  */
11548 if (TREE_CODE (arg) == SSA_NAME)
11549 arg = SSA_NAME_VAR (arg);
11551 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11552 or __builtin_next_arg (0) the first time we see it, after checking
11553 the arguments and if needed issuing a warning. */
11554 if (!integer_zerop (arg))
11556 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11558 /* Strip off all nops for the sake of the comparison. This
11559 is not quite the same as STRIP_NOPS. It does more.
11560 We must also strip off INDIRECT_EXPR for C++ reference
11561 parameters. */
11562 while (CONVERT_EXPR_P (arg)
11563 || TREE_CODE (arg) == INDIRECT_REF)
11564 arg = TREE_OPERAND (arg, 0);
11565 if (arg != last_parm)
11567 /* FIXME: Sometimes with the tree optimizers we can get the
11568 not the last argument even though the user used the last
11569 argument. We just warn and set the arg to be the last
11570 argument so that we will get wrong-code because of
11571 it. */
11572 warning (0, "second parameter of %<va_start%> not last named argument");
11575 /* Undefined by C99 7.15.1.4p4 (va_start):
11576 "If the parameter parmN is declared with the register storage
11577 class, with a function or array type, or with a type that is
11578 not compatible with the type that results after application of
11579 the default argument promotions, the behavior is undefined."
11581 else if (DECL_REGISTER (arg))
11582 warning (0, "undefined behaviour when second parameter of "
11583 "%<va_start%> is declared with %<register%> storage");
11585 /* We want to verify the second parameter just once before the tree
11586 optimizers are run and then avoid keeping it in the tree,
11587 as otherwise we could warn even for correct code like:
11588 void foo (int i, ...)
11589 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11590 if (va_start_p)
11591 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11592 else
11593 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11595 return false;
11599 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11600 ORIG may be null if this is a 2-argument call. We don't attempt to
11601 simplify calls with more than 3 arguments.
11603 Return NULL_TREE if no simplification was possible, otherwise return the
11604 simplified form of the call as a tree. If IGNORED is true, it means that
11605 the caller does not use the returned value of the function. */
11607 static tree
11608 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11609 tree orig, int ignored)
11611 tree call, retval;
11612 const char *fmt_str = NULL;
11614 /* Verify the required arguments in the original call. We deal with two
11615 types of sprintf() calls: 'sprintf (str, fmt)' and
11616 'sprintf (dest, "%s", orig)'. */
11617 if (!validate_arg (dest, POINTER_TYPE)
11618 || !validate_arg (fmt, POINTER_TYPE))
11619 return NULL_TREE;
11620 if (orig && !validate_arg (orig, POINTER_TYPE))
11621 return NULL_TREE;
11623 /* Check whether the format is a literal string constant. */
11624 fmt_str = c_getstr (fmt);
11625 if (fmt_str == NULL)
11626 return NULL_TREE;
11628 call = NULL_TREE;
11629 retval = NULL_TREE;
11631 if (!init_target_chars ())
11632 return NULL_TREE;
11634 /* If the format doesn't contain % args or %%, use strcpy. */
11635 if (strchr (fmt_str, target_percent) == NULL)
11637 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11639 if (!fn)
11640 return NULL_TREE;
11642 /* Don't optimize sprintf (buf, "abc", ptr++). */
11643 if (orig)
11644 return NULL_TREE;
11646 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11647 'format' is known to contain no % formats. */
11648 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11649 if (!ignored)
11650 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11653 /* If the format is "%s", use strcpy if the result isn't used. */
11654 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11656 tree fn;
11657 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11659 if (!fn)
11660 return NULL_TREE;
11662 /* Don't crash on sprintf (str1, "%s"). */
11663 if (!orig)
11664 return NULL_TREE;
11666 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11667 if (!ignored)
11669 retval = c_strlen (orig, 1);
11670 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11671 return NULL_TREE;
11673 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11676 if (call && retval)
11678 retval = fold_convert_loc
11679 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11680 retval);
11681 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11683 else
11684 return call;
11687 /* Expand a call EXP to __builtin_object_size. */
11690 expand_builtin_object_size (tree exp)
11692 tree ost;
11693 int object_size_type;
11694 tree fndecl = get_callee_fndecl (exp);
11696 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11698 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11699 exp, fndecl);
11700 expand_builtin_trap ();
11701 return const0_rtx;
11704 ost = CALL_EXPR_ARG (exp, 1);
11705 STRIP_NOPS (ost);
11707 if (TREE_CODE (ost) != INTEGER_CST
11708 || tree_int_cst_sgn (ost) < 0
11709 || compare_tree_int (ost, 3) > 0)
11711 error ("%Klast argument of %D is not integer constant between 0 and 3",
11712 exp, fndecl);
11713 expand_builtin_trap ();
11714 return const0_rtx;
11717 object_size_type = tree_low_cst (ost, 0);
11719 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11722 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11723 FCODE is the BUILT_IN_* to use.
11724 Return NULL_RTX if we failed; the caller should emit a normal call,
11725 otherwise try to get the result in TARGET, if convenient (and in
11726 mode MODE if that's convenient). */
11728 static rtx
11729 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11730 enum built_in_function fcode)
11732 tree dest, src, len, size;
11734 if (!validate_arglist (exp,
11735 POINTER_TYPE,
11736 fcode == BUILT_IN_MEMSET_CHK
11737 ? INTEGER_TYPE : POINTER_TYPE,
11738 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11739 return NULL_RTX;
11741 dest = CALL_EXPR_ARG (exp, 0);
11742 src = CALL_EXPR_ARG (exp, 1);
11743 len = CALL_EXPR_ARG (exp, 2);
11744 size = CALL_EXPR_ARG (exp, 3);
11746 if (! host_integerp (size, 1))
11747 return NULL_RTX;
11749 if (host_integerp (len, 1) || integer_all_onesp (size))
11751 tree fn;
11753 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11755 warning_at (tree_nonartificial_location (exp),
11756 0, "%Kcall to %D will always overflow destination buffer",
11757 exp, get_callee_fndecl (exp));
11758 return NULL_RTX;
11761 fn = NULL_TREE;
11762 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11763 mem{cpy,pcpy,move,set} is available. */
11764 switch (fcode)
11766 case BUILT_IN_MEMCPY_CHK:
11767 fn = built_in_decls[BUILT_IN_MEMCPY];
11768 break;
11769 case BUILT_IN_MEMPCPY_CHK:
11770 fn = built_in_decls[BUILT_IN_MEMPCPY];
11771 break;
11772 case BUILT_IN_MEMMOVE_CHK:
11773 fn = built_in_decls[BUILT_IN_MEMMOVE];
11774 break;
11775 case BUILT_IN_MEMSET_CHK:
11776 fn = built_in_decls[BUILT_IN_MEMSET];
11777 break;
11778 default:
11779 break;
11782 if (! fn)
11783 return NULL_RTX;
11785 fn = build_call_nofold (fn, 3, dest, src, len);
11786 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11787 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11788 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11790 else if (fcode == BUILT_IN_MEMSET_CHK)
11791 return NULL_RTX;
11792 else
11794 unsigned int dest_align
11795 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11797 /* If DEST is not a pointer type, call the normal function. */
11798 if (dest_align == 0)
11799 return NULL_RTX;
11801 /* If SRC and DEST are the same (and not volatile), do nothing. */
11802 if (operand_equal_p (src, dest, 0))
11804 tree expr;
11806 if (fcode != BUILT_IN_MEMPCPY_CHK)
11808 /* Evaluate and ignore LEN in case it has side-effects. */
11809 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11810 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11813 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11814 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11817 /* __memmove_chk special case. */
11818 if (fcode == BUILT_IN_MEMMOVE_CHK)
11820 unsigned int src_align
11821 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11823 if (src_align == 0)
11824 return NULL_RTX;
11826 /* If src is categorized for a readonly section we can use
11827 normal __memcpy_chk. */
11828 if (readonly_data_expr (src))
11830 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11831 if (!fn)
11832 return NULL_RTX;
11833 fn = build_call_nofold (fn, 4, dest, src, len, size);
11834 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11835 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11836 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11839 return NULL_RTX;
11843 /* Emit warning if a buffer overflow is detected at compile time. */
11845 static void
11846 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11848 int is_strlen = 0;
11849 tree len, size;
11850 location_t loc = tree_nonartificial_location (exp);
11852 switch (fcode)
11854 case BUILT_IN_STRCPY_CHK:
11855 case BUILT_IN_STPCPY_CHK:
11856 /* For __strcat_chk the warning will be emitted only if overflowing
11857 by at least strlen (dest) + 1 bytes. */
11858 case BUILT_IN_STRCAT_CHK:
11859 len = CALL_EXPR_ARG (exp, 1);
11860 size = CALL_EXPR_ARG (exp, 2);
11861 is_strlen = 1;
11862 break;
11863 case BUILT_IN_STRNCAT_CHK:
11864 case BUILT_IN_STRNCPY_CHK:
11865 len = CALL_EXPR_ARG (exp, 2);
11866 size = CALL_EXPR_ARG (exp, 3);
11867 break;
11868 case BUILT_IN_SNPRINTF_CHK:
11869 case BUILT_IN_VSNPRINTF_CHK:
11870 len = CALL_EXPR_ARG (exp, 1);
11871 size = CALL_EXPR_ARG (exp, 3);
11872 break;
11873 default:
11874 gcc_unreachable ();
11877 if (!len || !size)
11878 return;
11880 if (! host_integerp (size, 1) || integer_all_onesp (size))
11881 return;
11883 if (is_strlen)
11885 len = c_strlen (len, 1);
11886 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11887 return;
11889 else if (fcode == BUILT_IN_STRNCAT_CHK)
11891 tree src = CALL_EXPR_ARG (exp, 1);
11892 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11893 return;
11894 src = c_strlen (src, 1);
11895 if (! src || ! host_integerp (src, 1))
11897 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11898 exp, get_callee_fndecl (exp));
11899 return;
11901 else if (tree_int_cst_lt (src, size))
11902 return;
11904 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11905 return;
11907 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11908 exp, get_callee_fndecl (exp));
11911 /* Emit warning if a buffer overflow is detected at compile time
11912 in __sprintf_chk/__vsprintf_chk calls. */
11914 static void
11915 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11917 tree size, len, fmt;
11918 const char *fmt_str;
11919 int nargs = call_expr_nargs (exp);
11921 /* Verify the required arguments in the original call. */
11923 if (nargs < 4)
11924 return;
11925 size = CALL_EXPR_ARG (exp, 2);
11926 fmt = CALL_EXPR_ARG (exp, 3);
11928 if (! host_integerp (size, 1) || integer_all_onesp (size))
11929 return;
11931 /* Check whether the format is a literal string constant. */
11932 fmt_str = c_getstr (fmt);
11933 if (fmt_str == NULL)
11934 return;
11936 if (!init_target_chars ())
11937 return;
11939 /* If the format doesn't contain % args or %%, we know its size. */
11940 if (strchr (fmt_str, target_percent) == 0)
11941 len = build_int_cstu (size_type_node, strlen (fmt_str));
11942 /* If the format is "%s" and first ... argument is a string literal,
11943 we know it too. */
11944 else if (fcode == BUILT_IN_SPRINTF_CHK
11945 && strcmp (fmt_str, target_percent_s) == 0)
11947 tree arg;
11949 if (nargs < 5)
11950 return;
11951 arg = CALL_EXPR_ARG (exp, 4);
11952 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11953 return;
11955 len = c_strlen (arg, 1);
11956 if (!len || ! host_integerp (len, 1))
11957 return;
11959 else
11960 return;
11962 if (! tree_int_cst_lt (len, size))
11963 warning_at (tree_nonartificial_location (exp),
11964 0, "%Kcall to %D will always overflow destination buffer",
11965 exp, get_callee_fndecl (exp));
11968 /* Emit warning if a free is called with address of a variable. */
11970 static void
11971 maybe_emit_free_warning (tree exp)
11973 tree arg = CALL_EXPR_ARG (exp, 0);
11975 STRIP_NOPS (arg);
11976 if (TREE_CODE (arg) != ADDR_EXPR)
11977 return;
11979 arg = get_base_address (TREE_OPERAND (arg, 0));
11980 if (arg == NULL || INDIRECT_REF_P (arg))
11981 return;
11983 if (SSA_VAR_P (arg))
11984 warning_at (tree_nonartificial_location (exp),
11985 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11986 else
11987 warning_at (tree_nonartificial_location (exp),
11988 0, "%Kattempt to free a non-heap object", exp);
11991 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11992 if possible. */
11994 tree
11995 fold_builtin_object_size (tree ptr, tree ost)
11997 tree ret = NULL_TREE;
11998 int object_size_type;
12000 if (!validate_arg (ptr, POINTER_TYPE)
12001 || !validate_arg (ost, INTEGER_TYPE))
12002 return NULL_TREE;
12004 STRIP_NOPS (ost);
12006 if (TREE_CODE (ost) != INTEGER_CST
12007 || tree_int_cst_sgn (ost) < 0
12008 || compare_tree_int (ost, 3) > 0)
12009 return NULL_TREE;
12011 object_size_type = tree_low_cst (ost, 0);
12013 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12014 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12015 and (size_t) 0 for types 2 and 3. */
12016 if (TREE_SIDE_EFFECTS (ptr))
12017 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12019 if (TREE_CODE (ptr) == ADDR_EXPR)
12020 ret = build_int_cstu (size_type_node,
12021 compute_builtin_object_size (ptr, object_size_type));
12023 else if (TREE_CODE (ptr) == SSA_NAME)
12025 unsigned HOST_WIDE_INT bytes;
12027 /* If object size is not known yet, delay folding until
12028 later. Maybe subsequent passes will help determining
12029 it. */
12030 bytes = compute_builtin_object_size (ptr, object_size_type);
12031 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
12032 ? -1 : 0))
12033 ret = build_int_cstu (size_type_node, bytes);
12036 if (ret)
12038 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
12039 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
12040 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
12041 ret = NULL_TREE;
12044 return ret;
12047 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12048 DEST, SRC, LEN, and SIZE are the arguments to the call.
12049 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12050 code of the builtin. If MAXLEN is not NULL, it is maximum length
12051 passed as third argument. */
12053 tree
12054 fold_builtin_memory_chk (location_t loc, tree fndecl,
12055 tree dest, tree src, tree len, tree size,
12056 tree maxlen, bool ignore,
12057 enum built_in_function fcode)
12059 tree fn;
12061 if (!validate_arg (dest, POINTER_TYPE)
12062 || !validate_arg (src,
12063 (fcode == BUILT_IN_MEMSET_CHK
12064 ? INTEGER_TYPE : POINTER_TYPE))
12065 || !validate_arg (len, INTEGER_TYPE)
12066 || !validate_arg (size, INTEGER_TYPE))
12067 return NULL_TREE;
12069 /* If SRC and DEST are the same (and not volatile), return DEST
12070 (resp. DEST+LEN for __mempcpy_chk). */
12071 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12073 if (fcode != BUILT_IN_MEMPCPY_CHK)
12074 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12075 dest, len);
12076 else
12078 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12079 dest, len);
12080 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12084 if (! host_integerp (size, 1))
12085 return NULL_TREE;
12087 if (! integer_all_onesp (size))
12089 if (! host_integerp (len, 1))
12091 /* If LEN is not constant, try MAXLEN too.
12092 For MAXLEN only allow optimizing into non-_ocs function
12093 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12094 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12096 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12098 /* (void) __mempcpy_chk () can be optimized into
12099 (void) __memcpy_chk (). */
12100 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12101 if (!fn)
12102 return NULL_TREE;
12104 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12106 return NULL_TREE;
12109 else
12110 maxlen = len;
12112 if (tree_int_cst_lt (size, maxlen))
12113 return NULL_TREE;
12116 fn = NULL_TREE;
12117 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12118 mem{cpy,pcpy,move,set} is available. */
12119 switch (fcode)
12121 case BUILT_IN_MEMCPY_CHK:
12122 fn = built_in_decls[BUILT_IN_MEMCPY];
12123 break;
12124 case BUILT_IN_MEMPCPY_CHK:
12125 fn = built_in_decls[BUILT_IN_MEMPCPY];
12126 break;
12127 case BUILT_IN_MEMMOVE_CHK:
12128 fn = built_in_decls[BUILT_IN_MEMMOVE];
12129 break;
12130 case BUILT_IN_MEMSET_CHK:
12131 fn = built_in_decls[BUILT_IN_MEMSET];
12132 break;
12133 default:
12134 break;
12137 if (!fn)
12138 return NULL_TREE;
12140 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12143 /* Fold a call to the __st[rp]cpy_chk builtin.
12144 DEST, SRC, and SIZE are the arguments to the call.
12145 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12146 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12147 strings passed as second argument. */
12149 tree
12150 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12151 tree src, tree size,
12152 tree maxlen, bool ignore,
12153 enum built_in_function fcode)
12155 tree len, fn;
12157 if (!validate_arg (dest, POINTER_TYPE)
12158 || !validate_arg (src, POINTER_TYPE)
12159 || !validate_arg (size, INTEGER_TYPE))
12160 return NULL_TREE;
12162 /* If SRC and DEST are the same (and not volatile), return DEST. */
12163 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12164 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12166 if (! host_integerp (size, 1))
12167 return NULL_TREE;
12169 if (! integer_all_onesp (size))
12171 len = c_strlen (src, 1);
12172 if (! len || ! host_integerp (len, 1))
12174 /* If LEN is not constant, try MAXLEN too.
12175 For MAXLEN only allow optimizing into non-_ocs function
12176 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12177 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12179 if (fcode == BUILT_IN_STPCPY_CHK)
12181 if (! ignore)
12182 return NULL_TREE;
12184 /* If return value of __stpcpy_chk is ignored,
12185 optimize into __strcpy_chk. */
12186 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12187 if (!fn)
12188 return NULL_TREE;
12190 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12193 if (! len || TREE_SIDE_EFFECTS (len))
12194 return NULL_TREE;
12196 /* If c_strlen returned something, but not a constant,
12197 transform __strcpy_chk into __memcpy_chk. */
12198 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12199 if (!fn)
12200 return NULL_TREE;
12202 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12203 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12204 build_call_expr_loc (loc, fn, 4,
12205 dest, src, len, size));
12208 else
12209 maxlen = len;
12211 if (! tree_int_cst_lt (maxlen, size))
12212 return NULL_TREE;
12215 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12216 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12217 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12218 if (!fn)
12219 return NULL_TREE;
12221 return build_call_expr_loc (loc, fn, 2, dest, src);
12224 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12225 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12226 length passed as third argument. */
12228 tree
12229 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12230 tree len, tree size, tree maxlen)
12232 tree fn;
12234 if (!validate_arg (dest, POINTER_TYPE)
12235 || !validate_arg (src, POINTER_TYPE)
12236 || !validate_arg (len, INTEGER_TYPE)
12237 || !validate_arg (size, INTEGER_TYPE))
12238 return NULL_TREE;
12240 if (! host_integerp (size, 1))
12241 return NULL_TREE;
12243 if (! integer_all_onesp (size))
12245 if (! host_integerp (len, 1))
12247 /* If LEN is not constant, try MAXLEN too.
12248 For MAXLEN only allow optimizing into non-_ocs function
12249 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12250 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12251 return NULL_TREE;
12253 else
12254 maxlen = len;
12256 if (tree_int_cst_lt (size, maxlen))
12257 return NULL_TREE;
12260 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12261 fn = built_in_decls[BUILT_IN_STRNCPY];
12262 if (!fn)
12263 return NULL_TREE;
12265 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12268 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12269 are the arguments to the call. */
12271 static tree
12272 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12273 tree src, tree size)
12275 tree fn;
12276 const char *p;
12278 if (!validate_arg (dest, POINTER_TYPE)
12279 || !validate_arg (src, POINTER_TYPE)
12280 || !validate_arg (size, INTEGER_TYPE))
12281 return NULL_TREE;
12283 p = c_getstr (src);
12284 /* If the SRC parameter is "", return DEST. */
12285 if (p && *p == '\0')
12286 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12288 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12289 return NULL_TREE;
12291 /* If __builtin_strcat_chk is used, assume strcat is available. */
12292 fn = built_in_decls[BUILT_IN_STRCAT];
12293 if (!fn)
12294 return NULL_TREE;
12296 return build_call_expr_loc (loc, fn, 2, dest, src);
12299 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12300 LEN, and SIZE. */
12302 static tree
12303 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12304 tree dest, tree src, tree len, tree size)
12306 tree fn;
12307 const char *p;
12309 if (!validate_arg (dest, POINTER_TYPE)
12310 || !validate_arg (src, POINTER_TYPE)
12311 || !validate_arg (size, INTEGER_TYPE)
12312 || !validate_arg (size, INTEGER_TYPE))
12313 return NULL_TREE;
12315 p = c_getstr (src);
12316 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12317 if (p && *p == '\0')
12318 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12319 else if (integer_zerop (len))
12320 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12322 if (! host_integerp (size, 1))
12323 return NULL_TREE;
12325 if (! integer_all_onesp (size))
12327 tree src_len = c_strlen (src, 1);
12328 if (src_len
12329 && host_integerp (src_len, 1)
12330 && host_integerp (len, 1)
12331 && ! tree_int_cst_lt (len, src_len))
12333 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12334 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12335 if (!fn)
12336 return NULL_TREE;
12338 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12340 return NULL_TREE;
12343 /* If __builtin_strncat_chk is used, assume strncat is available. */
12344 fn = built_in_decls[BUILT_IN_STRNCAT];
12345 if (!fn)
12346 return NULL_TREE;
12348 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12351 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12352 a normal call should be emitted rather than expanding the function
12353 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12355 static tree
12356 fold_builtin_sprintf_chk (location_t loc, tree exp,
12357 enum built_in_function fcode)
12359 tree dest, size, len, fn, fmt, flag;
12360 const char *fmt_str;
12361 int nargs = call_expr_nargs (exp);
/* Call layout: arg 0 = DEST, 1 = FLAG, 2 = SIZE, 3 = FMT; any format
   arguments follow FMT.  */
12363 /* Verify the required arguments in the original call. */
12364 if (nargs < 4)
12365 return NULL_TREE;
12366 dest = CALL_EXPR_ARG (exp, 0);
12367 if (!validate_arg (dest, POINTER_TYPE))
12368 return NULL_TREE;
12369 flag = CALL_EXPR_ARG (exp, 1);
12370 if (!validate_arg (flag, INTEGER_TYPE))
12371 return NULL_TREE;
12372 size = CALL_EXPR_ARG (exp, 2);
12373 if (!validate_arg (size, INTEGER_TYPE))
12374 return NULL_TREE;
12375 fmt = CALL_EXPR_ARG (exp, 3);
12376 if (!validate_arg (fmt, POINTER_TYPE))
12377 return NULL_TREE;
12379 if (! host_integerp (size, 1))
12380 return NULL_TREE;
/* LEN will hold the statically-known output length, if we can compute it.  */
12382 len = NULL_TREE;
12384 if (!init_target_chars ())
12385 return NULL_TREE;
12387 /* Check whether the format is a literal string constant. */
12388 fmt_str = c_getstr (fmt);
12389 if (fmt_str != NULL)
12391 /* If the format doesn't contain % args or %%, we know the size. */
12392 if (strchr (fmt_str, target_percent) == 0)
12394 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12395 len = build_int_cstu (size_type_node, strlen (fmt_str))
12397 /* If the format is "%s" and first ... argument is a string literal,
12398 we know the size too. */
12399 else if (fcode == BUILT_IN_SPRINTF_CHK
12400 && strcmp (fmt_str, target_percent_s) == 0)
12402 tree arg;
12404 if (nargs == 5)
12406 arg = CALL_EXPR_ARG (exp, 4);
12407 if (validate_arg (arg, POINTER_TYPE))
12409 len = c_strlen (arg, 1);
12410 if (! len || ! host_integerp (len, 1))
12411 len = NULL_TREE;
/* An all-ones SIZE means the destination object size is unknown, so no
   LEN < SIZE check is possible; otherwise require the proof.  */
12417 if (! integer_all_onesp (size))
12419 if (! len || ! tree_int_cst_lt (len, size))
12420 return NULL_TREE;
12423 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12424 or if format doesn't contain % chars or is "%s". */
12425 if (! integer_zerop (flag))
12427 if (fmt_str == NULL)
12428 return NULL_TREE;
12429 if (strchr (fmt_str, target_percent) != NULL
12430 && strcmp (fmt_str, target_percent_s))
12431 return NULL_TREE;
12434 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12435 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12436 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12437 if (!fn)
12438 return NULL_TREE;
/* Skip the first 4 args (DEST, FLAG, SIZE, FMT) and call FN with
   (DEST, FMT) plus any remaining format arguments.  */
12440 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12443 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12444 a normal call should be emitted rather than expanding the function
12445 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12446 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12447 passed as second argument. */
12449 tree
12450 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12451 enum built_in_function fcode)
12453 tree dest, size, len, fn, fmt, flag;
12454 const char *fmt_str;
/* Call layout: arg 0 = DEST, 1 = LEN, 2 = FLAG, 3 = SIZE, 4 = FMT;
   any format arguments follow FMT.  */
12456 /* Verify the required arguments in the original call. */
12457 if (call_expr_nargs (exp) < 5)
12458 return NULL_TREE;
12459 dest = CALL_EXPR_ARG (exp, 0);
12460 if (!validate_arg (dest, POINTER_TYPE))
12461 return NULL_TREE;
12462 len = CALL_EXPR_ARG (exp, 1);
12463 if (!validate_arg (len, INTEGER_TYPE))
12464 return NULL_TREE;
12465 flag = CALL_EXPR_ARG (exp, 2);
12466 if (!validate_arg (flag, INTEGER_TYPE))
12467 return NULL_TREE;
12468 size = CALL_EXPR_ARG (exp, 3);
12469 if (!validate_arg (size, INTEGER_TYPE))
12470 return NULL_TREE;
12471 fmt = CALL_EXPR_ARG (exp, 4);
12472 if (!validate_arg (fmt, POINTER_TYPE))
12473 return NULL_TREE;
12475 if (! host_integerp (size, 1))
12476 return NULL_TREE;
/* An all-ones SIZE means the destination object size is unknown; then
   no SIZE >= LEN check is needed.  Otherwise require LEN (or MAXLEN)
   to be a constant not exceeding SIZE.  */
12478 if (! integer_all_onesp (size))
12480 if (! host_integerp (len, 1))
12482 /* If LEN is not constant, try MAXLEN too.
12483 For MAXLEN only allow optimizing into non-_ocs function
12484 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12485 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12486 return NULL_TREE;
12488 else
12489 maxlen = len;
12491 if (tree_int_cst_lt (size, maxlen))
12492 return NULL_TREE;
12495 if (!init_target_chars ())
12496 return NULL_TREE;
12498 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12499 or if format doesn't contain % chars or is "%s". */
12500 if (! integer_zerop (flag))
12502 fmt_str = c_getstr (fmt);
12503 if (fmt_str == NULL)
12504 return NULL_TREE;
12505 if (strchr (fmt_str, target_percent) != NULL
12506 && strcmp (fmt_str, target_percent_s))
12507 return NULL_TREE;
12510 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12511 available. */
12512 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12513 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12514 if (!fn)
12515 return NULL_TREE;
/* Skip the first 5 args (DEST, LEN, FLAG, SIZE, FMT) and call FN with
   (DEST, LEN, FMT) plus any remaining format arguments.  */
12517 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12520 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12521 FMT and ARG are the arguments to the call; we don't fold cases with
12522 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12524 Return NULL_TREE if no simplification was possible, otherwise return the
12525 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12526 code of the function to be simplified. */
12528 static tree
12529 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12530 tree arg, bool ignore,
12531 enum built_in_function fcode)
/* FN_PUTCHAR/FN_PUTS are the candidate replacement decls; either may be
   NULL_TREE, which simply blocks that particular replacement.  CALL
   accumulates the folded replacement, if any.  */
12533 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12534 const char *fmt_str = NULL;
12536 /* If the return value is used, don't do the transformation. */
12537 if (! ignore)
12538 return NULL_TREE;
12540 /* Verify the required arguments in the original call. */
12541 if (!validate_arg (fmt, POINTER_TYPE))
12542 return NULL_TREE;
12544 /* Check whether the format is a literal string constant. */
12545 fmt_str = c_getstr (fmt);
12546 if (fmt_str == NULL)
12547 return NULL_TREE;
12549 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12551 /* If we're using an unlocked function, assume the other
12552 unlocked functions exist explicitly. */
12553 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12554 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12556 else
12558 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12559 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12562 if (!init_target_chars ())
12563 return NULL_TREE;
12565 if (strcmp (fmt_str, target_percent_s) == 0
12566 || strchr (fmt_str, target_percent) == NULL)
12568 const char *str;
12570 if (strcmp (fmt_str, target_percent_s) == 0)
12572 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12573 return NULL_TREE;
12575 if (!arg || !validate_arg (arg, POINTER_TYPE))
12576 return NULL_TREE;
12578 str = c_getstr (arg);
12579 if (str == NULL)
12580 return NULL_TREE;
12582 else
12584 /* The format specifier doesn't contain any '%' characters. */
12585 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12586 && arg)
12587 return NULL_TREE;
12588 str = fmt_str;
12591 /* If the string was "", printf does nothing. */
12592 if (str[0] == '\0')
12593 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12595 /* If the string has length of 1, call putchar. */
12596 if (str[1] == '\0')
12598 /* Given printf("c"), (where c is any one character,)
12599 convert "c"[0] to an int and pass that to the replacement
12600 function. */
12601 newarg = build_int_cst (NULL_TREE, str[0]);
12602 if (fn_putchar)
12603 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12605 else
12607 /* If the string was "string\n", call puts("string"). */
12608 size_t len = strlen (str);
/* STR has length >= 2 here: the "" and single-char cases returned above.  */
12609 if ((unsigned char)str[len - 1] == target_newline)
12611 /* Create a NUL-terminated string that's one char shorter
12612 than the original, stripping off the trailing '\n'. */
12613 char *newstr = XALLOCAVEC (char, len);
12614 memcpy (newstr, str, len - 1);
12615 newstr[len - 1] = 0;
12617 newarg = build_string_literal (len, newstr);
12618 if (fn_puts)
12619 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12621 else
12622 /* We'd like to arrange to call fputs(string,stdout) here,
12623 but we need stdout and don't have a way to get it yet. */
12624 return NULL_TREE;
12628 /* The other optimizations can be done only on the non-va_list variants. */
12629 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12630 return NULL_TREE;
12632 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12633 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12635 if (!arg || !validate_arg (arg, POINTER_TYPE))
12636 return NULL_TREE;
12637 if (fn_puts)
12638 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12641 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12642 else if (strcmp (fmt_str, target_percent_c) == 0)
12644 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12645 return NULL_TREE;
12646 if (fn_putchar)
12647 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12650 if (!call)
12651 return NULL_TREE;
/* The replacement's return type may differ from printf's; convert the
   call's value to FNDECL's return type.  */
12653 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12656 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12657 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12658 more than 3 arguments, and ARG may be null in the 2-argument case.
12660 Return NULL_TREE if no simplification was possible, otherwise return the
12661 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12662 code of the function to be simplified. */
12664 static tree
12665 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12666 tree fmt, tree arg, bool ignore,
12667 enum built_in_function fcode)
/* FN_FPUTC/FN_FPUTS may be NULL_TREE; a NULL decl simply blocks that
   particular replacement.  */
12669 tree fn_fputc, fn_fputs, call = NULL_TREE;
12670 const char *fmt_str = NULL;
12672 /* If the return value is used, don't do the transformation. */
12673 if (! ignore)
12674 return NULL_TREE;
12676 /* Verify the required arguments in the original call. */
12677 if (!validate_arg (fp, POINTER_TYPE))
12678 return NULL_TREE;
12679 if (!validate_arg (fmt, POINTER_TYPE))
12680 return NULL_TREE;
12682 /* Check whether the format is a literal string constant. */
12683 fmt_str = c_getstr (fmt);
12684 if (fmt_str == NULL)
12685 return NULL_TREE;
12687 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12689 /* If we're using an unlocked function, assume the other
12690 unlocked functions exist explicitly. */
12691 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12692 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12694 else
12696 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12697 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12700 if (!init_target_chars ())
12701 return NULL_TREE;
12703 /* If the format doesn't contain % args or %%, use strcpy. */
12704 if (strchr (fmt_str, target_percent) == NULL)
12706 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12707 && arg)
12708 return NULL_TREE;
12710 /* If the format specifier was "", fprintf does nothing. */
12711 if (fmt_str[0] == '\0')
12713 /* If FP has side-effects, just wait until gimplification is
12714 done. */
12715 if (TREE_SIDE_EFFECTS (fp))
12716 return NULL_TREE;
12718 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12721 /* When "string" doesn't contain %, replace all cases of
12722 fprintf (fp, string) with fputs (string, fp). The fputs
12723 builtin will take care of special cases like length == 1. */
12724 if (fn_fputs)
12725 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12728 /* The other optimizations can be done only on the non-va_list variants. */
12729 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12730 return NULL_TREE;
12732 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12733 else if (strcmp (fmt_str, target_percent_s) == 0)
12735 if (!arg || !validate_arg (arg, POINTER_TYPE))
12736 return NULL_TREE;
12737 if (fn_fputs)
12738 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12741 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12742 else if (strcmp (fmt_str, target_percent_c) == 0)
12744 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12745 return NULL_TREE;
12746 if (fn_fputc)
12747 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12750 if (!call)
12751 return NULL_TREE;
/* Convert the replacement's value to FNDECL's return type.  */
12752 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12755 /* Initialize format string characters in the target charset. */
12757 static bool
12758 init_target_chars (void)
12760 static bool init;
12761 if (!init)
12763 target_newline = lang_hooks.to_target_charset ('\n');
12764 target_percent = lang_hooks.to_target_charset ('%');
12765 target_c = lang_hooks.to_target_charset ('c');
12766 target_s = lang_hooks.to_target_charset ('s');
12767 if (target_newline == 0 || target_percent == 0 || target_c == 0
12768 || target_s == 0)
12769 return false;
12771 target_percent_c[0] = target_percent;
12772 target_percent_c[1] = target_c;
12773 target_percent_c[2] = '\0';
12775 target_percent_s[0] = target_percent;
12776 target_percent_s[1] = target_s;
12777 target_percent_s[2] = '\0';
12779 target_percent_s_newline[0] = target_percent;
12780 target_percent_s_newline[1] = target_s;
12781 target_percent_s_newline[2] = target_newline;
12782 target_percent_s_newline[3] = '\0';
12784 init = true;
12786 return true;
12789 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12790 and no overflow/underflow occurred. INEXACT is true if M was not
12791 exactly calculated. TYPE is the tree type for the result. This
12792 function assumes that you cleared the MPFR flags and then
12793 calculated M to see if anything subsequently set a flag prior to
12794 entering this function. Return NULL_TREE if any checks fail. */
12796 static tree
12797 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
/* On success the result is a REAL_CST of TYPE built with build_real.  */
12799 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12800 overflow/underflow occurred. If -frounding-math, proceed iff the
12801 result of calling FUNC was exact. */
12802 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12803 && (!flag_rounding_math || !inexact))
12805 REAL_VALUE_TYPE rr;
/* Round-to-nearest when converting into GCC's internal representation.  */
12807 real_from_mpfr (&rr, m, type, GMP_RNDN);
12808 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12809 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12810 but the mpft_t is not, then we underflowed in the
12811 conversion. */
12812 if (real_isfinite (&rr)
12813 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12815 REAL_VALUE_TYPE rmode;
12817 real_convert (&rmode, TYPE_MODE (type), &rr);
12818 /* Proceed iff the specified mode can hold the value. */
12819 if (real_identical (&rmode, &rr))
12820 return build_real (type, rmode);
12823 return NULL_TREE;
12826 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12827 number and no overflow/underflow occurred. INEXACT is true if M
12828 was not exactly calculated. TYPE is the tree type for the result.
12829 This function assumes that you cleared the MPFR flags and then
12830 calculated M to see if anything subsequently set a flag prior to
12831 entering this function. Return NULL_TREE if any checks fail, if
12832 FORCE_CONVERT is true, then bypass the checks. */
12834 static tree
12835 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
/* TYPE is a complex type; TREE_TYPE (TYPE) is its real component type.  */
12837 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12838 overflow/underflow occurred. If -frounding-math, proceed iff the
12839 result of calling FUNC was exact. */
12840 if (force_convert
12841 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12842 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12843 && (!flag_rounding_math || !inexact)))
12845 REAL_VALUE_TYPE re, im;
12847 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12848 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12849 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12850 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12851 but the mpft_t is not, then we underflowed in the
12852 conversion. */
12853 if (force_convert
12854 || (real_isfinite (&re) && real_isfinite (&im)
12855 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12856 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12858 REAL_VALUE_TYPE re_mode, im_mode;
12860 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12861 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12862 /* Proceed iff the specified mode can hold the value. */
12863 if (force_convert
12864 || (real_identical (&re_mode, &re)
12865 && real_identical (&im_mode, &im)))
12866 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12867 build_real (TREE_TYPE (type), im_mode));
12870 return NULL_TREE;
12873 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12874 FUNC on it and return the resulting value as a tree with type TYPE.
12875 If MIN and/or MAX are not NULL, then the supplied ARG must be
12876 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12877 acceptable values, otherwise they are not. The mpfr precision is
12878 set to the precision of TYPE. We assume that function FUNC returns
12879 zero if the result could be calculated exactly within the requested
12880 precision. */
12882 static tree
12883 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12884 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12885 bool inclusive)
12887 tree result = NULL_TREE;
12889 STRIP_NOPS (arg);
12891 /* To proceed, MPFR must exactly represent the target floating point
12892 format, which only happens when the target base equals two. */
12893 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12894 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12896 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12898 if (real_isfinite (ra)
12899 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12900 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12902 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12903 const int prec = fmt->p;
12904 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12905 int inexact;
12906 mpfr_t m;
/* Evaluate FUNC at exactly TYPE's precision; the flags are cleared so
   do_mpfr_ckconv can detect any overflow/underflow that FUNC raises.  */
12908 mpfr_init2 (m, prec);
12909 mpfr_from_real (m, ra, GMP_RNDN);
12910 mpfr_clear_flags ();
12911 inexact = func (m, m, rnd);
12912 result = do_mpfr_ckconv (m, type, inexact);
12913 mpfr_clear (m);
12917 return result;
12920 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12921 FUNC on it and return the resulting value as a tree with type TYPE.
12922 The mpfr precision is set to the precision of TYPE. We assume that
12923 function FUNC returns zero if the result could be calculated
12924 exactly within the requested precision. */
12926 static tree
12927 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12928 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12930 tree result = NULL_TREE;
12932 STRIP_NOPS (arg1);
12933 STRIP_NOPS (arg2);
12935 /* To proceed, MPFR must exactly represent the target floating point
12936 format, which only happens when the target base equals two. */
12937 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12938 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12939 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12941 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12942 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12944 if (real_isfinite (ra1) && real_isfinite (ra2))
12946 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12947 const int prec = fmt->p;
12948 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12949 int inexact;
12950 mpfr_t m1, m2;
/* Both operands are loaded at TYPE's precision; M1 doubles as the result.  */
12952 mpfr_inits2 (prec, m1, m2, NULL);
12953 mpfr_from_real (m1, ra1, GMP_RNDN);
12954 mpfr_from_real (m2, ra2, GMP_RNDN);
12955 mpfr_clear_flags ();
12956 inexact = func (m1, m1, m2, rnd);
12957 result = do_mpfr_ckconv (m1, type, inexact);
12958 mpfr_clears (m1, m2, NULL);
12962 return result;
12965 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12966 FUNC on it and return the resulting value as a tree with type TYPE.
12967 The mpfr precision is set to the precision of TYPE. We assume that
12968 function FUNC returns zero if the result could be calculated
12969 exactly within the requested precision. */
12971 static tree
12972 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12973 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12975 tree result = NULL_TREE;
12977 STRIP_NOPS (arg1);
12978 STRIP_NOPS (arg2);
12979 STRIP_NOPS (arg3);
12981 /* To proceed, MPFR must exactly represent the target floating point
12982 format, which only happens when the target base equals two. */
12983 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12984 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12985 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12986 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12988 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12989 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12990 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12992 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12994 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12995 const int prec = fmt->p;
12996 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12997 int inexact;
12998 mpfr_t m1, m2, m3;
/* All three operands at TYPE's precision; M1 doubles as the result.  */
13000 mpfr_inits2 (prec, m1, m2, m3, NULL);
13001 mpfr_from_real (m1, ra1, GMP_RNDN);
13002 mpfr_from_real (m2, ra2, GMP_RNDN);
13003 mpfr_from_real (m3, ra3, GMP_RNDN);
13004 mpfr_clear_flags ();
13005 inexact = func (m1, m1, m2, m3, rnd);
13006 result = do_mpfr_ckconv (m1, type, inexact);
13007 mpfr_clears (m1, m2, m3, NULL);
13011 return result;
13014 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13015 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13016 If ARG_SINP and ARG_COSP are NULL then the result is returned
13017 as a complex value.
13018 The type is taken from the type of ARG and is used for setting the
13019 precision of the calculation and results. */
13021 static tree
13022 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13024 tree const type = TREE_TYPE (arg);
13025 tree result = NULL_TREE;
13027 STRIP_NOPS (arg);
13029 /* To proceed, MPFR must exactly represent the target floating point
13030 format, which only happens when the target base equals two. */
13031 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13032 && TREE_CODE (arg) == REAL_CST
13033 && !TREE_OVERFLOW (arg)
13035 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13037 if (real_isfinite (ra))
13039 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13040 const int prec = fmt->p;
13041 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13042 tree result_s, result_c;
13043 int inexact;
13044 mpfr_t m, ms, mc;
13046 mpfr_inits2 (prec, m, ms, mc, NULL);
13047 mpfr_from_real (m, ra, GMP_RNDN);
13048 mpfr_clear_flags ();
13049 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13050 result_s = do_mpfr_ckconv (ms, type, inexact);
13051 result_c = do_mpfr_ckconv (mc, type, inexact);
13052 mpfr_clears (m, ms, mc, NULL);
13053 if (result_s && result_c)
13055 /* If we are to return in a complex value do so. */
/* Note the argument order: the real part is cos, the imaginary part sin.  */
13056 if (!arg_sinp && !arg_cosp)
13057 return build_complex (build_complex_type (type),
13058 result_c, result_s);
13060 /* Dereference the sin/cos pointer arguments. */
13061 arg_sinp = build_fold_indirect_ref (arg_sinp);
13062 arg_cosp = build_fold_indirect_ref (arg_cosp);
13063 /* Proceed if valid pointer type were passed in. */
13064 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13065 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13067 /* Set the values. */
13068 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13069 result_s);
13070 TREE_SIDE_EFFECTS (result_s) = 1;
13071 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13072 result_c);
13073 TREE_SIDE_EFFECTS (result_c) = 1;
13074 /* Combine the assignments into a compound expr. */
13075 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13076 result_s, result_c));
13081 return result;
13084 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13085 two-argument mpfr order N Bessel function FUNC on them and return
13086 the resulting value as a tree with type TYPE. The mpfr precision
13087 is set to the precision of TYPE. We assume that function FUNC
13088 returns zero if the result could be calculated exactly within the
13089 requested precision. */
13090 static tree
13091 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13092 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13093 const REAL_VALUE_TYPE *min, bool inclusive)
13095 tree result = NULL_TREE;
13097 STRIP_NOPS (arg1);
13098 STRIP_NOPS (arg2);
13100 /* To proceed, MPFR must exactly represent the target floating point
13101 format, which only happens when the target base equals two. */
13102 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13103 && host_integerp (arg1, 0)
13104 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13106 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13107 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* FUNC takes the order as a long; reject HOST_WIDE_INT orders that do
   not round-trip through long.  */
13109 if (n == (long)n
13110 && real_isfinite (ra)
13111 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13113 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13114 const int prec = fmt->p;
13115 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13116 int inexact;
13117 mpfr_t m;
13119 mpfr_init2 (m, prec);
13120 mpfr_from_real (m, ra, GMP_RNDN);
13121 mpfr_clear_flags ();
13122 inexact = func (m, n, m, rnd);
13123 result = do_mpfr_ckconv (m, type, inexact);
13124 mpfr_clear (m);
13128 return result;
13131 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13132 the pointer *(ARG_QUO) and return the result. The type is taken
13133 from the type of ARG0 and is used for setting the precision of the
13134 calculation and results. */
13136 static tree
13137 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13139 tree const type = TREE_TYPE (arg0);
13140 tree result = NULL_TREE;
13142 STRIP_NOPS (arg0);
13143 STRIP_NOPS (arg1);
13145 /* To proceed, MPFR must exactly represent the target floating point
13146 format, which only happens when the target base equals two. */
13147 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13148 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13149 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13151 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13152 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13154 if (real_isfinite (ra0) && real_isfinite (ra1))
13156 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13157 const int prec = fmt->p;
13158 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13159 tree result_rem;
13160 long integer_quo;
13161 mpfr_t m0, m1;
13163 mpfr_inits2 (prec, m0, m1, NULL);
13164 mpfr_from_real (m0, ra0, GMP_RNDN);
13165 mpfr_from_real (m1, ra1, GMP_RNDN);
13166 mpfr_clear_flags ();
/* M0 is overwritten with the remainder; INTEGER_QUO receives the low
   bits of the quotient (see the mpfr_remquo documentation).  */
13167 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13168 /* Remquo is independent of the rounding mode, so pass
13169 inexact=0 to do_mpfr_ckconv(). */
13170 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13171 mpfr_clears (m0, m1, NULL);
13172 if (result_rem)
13174 /* MPFR calculates quo in the host's long so it may
13175 return more bits in quo than the target int can hold
13176 if sizeof(host long) > sizeof(target int). This can
13177 happen even for native compilers in LP64 mode. In
13178 these cases, modulo the quo value with the largest
13179 number that the target int can hold while leaving one
13180 bit for the sign. */
13181 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13182 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13184 /* Dereference the quo pointer argument. */
13185 arg_quo = build_fold_indirect_ref (arg_quo);
13186 /* Proceed iff a valid pointer type was passed in. */
13187 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13189 /* Set the value. */
13190 tree result_quo = fold_build2 (MODIFY_EXPR,
13191 TREE_TYPE (arg_quo), arg_quo,
13192 build_int_cst (NULL, integer_quo));
13193 TREE_SIDE_EFFECTS (result_quo) = 1;
13194 /* Combine the quo assignment with the rem. */
13195 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13196 result_quo, result_rem));
13201 return result;
13204 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13205 resulting value as a tree with type TYPE. The mpfr precision is
13206 set to the precision of TYPE. We assume that this mpfr function
13207 returns zero if the result could be calculated exactly within the
13208 requested precision. In addition, the integer pointer represented
13209 by ARG_SG will be dereferenced and set to the appropriate signgam
13210 (-1,1) value. */
13212 static tree
13213 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13215 tree result = NULL_TREE;
13217 STRIP_NOPS (arg);
13219 /* To proceed, MPFR must exactly represent the target floating point
13220 format, which only happens when the target base equals two. Also
13221 verify ARG is a constant and that ARG_SG is an int pointer. */
13222 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13223 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13224 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13225 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13227 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13229 /* In addition to NaN and Inf, the argument cannot be zero or a
13230 negative integer. */
13231 if (real_isfinite (ra)
13232 && ra->cl != rvc_zero
13233 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13235 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13236 const int prec = fmt->p;
13237 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13238 int inexact, sg;
13239 mpfr_t m;
13240 tree result_lg;
13242 mpfr_init2 (m, prec);
13243 mpfr_from_real (m, ra, GMP_RNDN);
13244 mpfr_clear_flags ();
/* SG receives the sign (-1 or 1) of Gamma(ARG), i.e. the signgam value.  */
13245 inexact = mpfr_lgamma (m, &sg, m, rnd);
13246 result_lg = do_mpfr_ckconv (m, type, inexact);
13247 mpfr_clear (m);
13248 if (result_lg)
13250 tree result_sg;
13252 /* Dereference the arg_sg pointer argument. */
13253 arg_sg = build_fold_indirect_ref (arg_sg);
13254 /* Assign the signgam value into *arg_sg. */
13255 result_sg = fold_build2 (MODIFY_EXPR,
13256 TREE_TYPE (arg_sg), arg_sg,
13257 build_int_cst (NULL, sg));
13258 TREE_SIDE_EFFECTS (result_sg) = 1;
13259 /* Combine the signgam assignment with the lgamma result. */
13260 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13261 result_sg, result_lg));
13266 return result;
13269 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13270 function FUNC on it and return the resulting value as a tree with
13271 type TYPE. The mpfr precision is set to the precision of TYPE. We
13272 assume that function FUNC returns zero if the result could be
13273 calculated exactly within the requested precision. */
13275 static tree
13276 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13278 tree result = NULL_TREE;
13280 STRIP_NOPS (arg);
13282 /* To proceed, MPFR must exactly represent the target floating point
13283 format, which only happens when the target base equals two. */
13284 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13285 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13286 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13288 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13289 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13291 if (real_isfinite (re) && real_isfinite (im))
13293 const struct real_format *const fmt =
13294 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13295 const int prec = fmt->p;
13296 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
/* CRND applies the same rounding choice to both the real and imaginary
   parts of the mpc computation.  */
13297 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13298 int inexact;
13299 mpc_t m;
13301 mpc_init2 (m, prec);
13302 mpfr_from_real (mpc_realref(m), re, rnd);
13303 mpfr_from_real (mpc_imagref(m), im, rnd);
13304 mpfr_clear_flags ();
13305 inexact = func (m, m, crnd);
13306 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13307 mpc_clear (m);
13311 return result;
13314 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13315 mpc function FUNC on it and return the resulting value as a tree
13316 with type TYPE. The mpfr precision is set to the precision of
13317 TYPE. We assume that function FUNC returns zero if the result
13318 could be calculated exactly within the requested precision. If
13319 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13320 in the arguments and/or results. */
13322 tree
13323 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13324 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13326 tree result = NULL_TREE;
13328 STRIP_NOPS (arg0);
13329 STRIP_NOPS (arg1);
13331 /* To proceed, MPFR must exactly represent the target floating point
13332 format, which only happens when the target base equals two. */
13333 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13334 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13335 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13336 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13337 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13339 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13340 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13341 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13342 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13344 if (do_nonfinite
13345 || (real_isfinite (re0) && real_isfinite (im0)
13346 && real_isfinite (re1) && real_isfinite (im1)))
13348 const struct real_format *const fmt =
13349 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13350 const int prec = fmt->p;
13351 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13352 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13353 int inexact;
13354 mpc_t m0, m1;
13356 mpc_init2 (m0, prec);
13357 mpc_init2 (m1, prec);
13358 mpfr_from_real (mpc_realref(m0), re0, rnd);
13359 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13360 mpfr_from_real (mpc_realref(m1), re1, rnd);
13361 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13362 mpfr_clear_flags ();
13363 inexact = func (m0, m0, m1, crnd);
13364 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13365 mpc_clear (m0);
13366 mpc_clear (m1);
13370 return result;
13373 /* FIXME tuples.
13374 The functions below provide an alternate interface for folding
13375 builtin function calls presented as GIMPLE_CALL statements rather
13376 than as CALL_EXPRs. The folded result is still expressed as a
13377 tree. There is too much code duplication in the handling of
13378 varargs functions, and a more intrusive re-factoring would permit
13379 better sharing of code between the tree and statement-based
13380 versions of these functions. */
13382 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13383 along with N new arguments specified as the "..." parameters. SKIP
13384 is the number of arguments in STMT to be omitted. This function is used
13385 to do varargs-to-varargs transformations. */
13387 static tree
13388 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13390 int oldnargs = gimple_call_num_args (stmt);
13391 int nargs = oldnargs - skip + n;
13392 tree fntype = TREE_TYPE (fndecl);
13393 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13394 tree *buffer;
13395 int i, j;
13396 va_list ap;
13397 location_t loc = gimple_location (stmt);
13399 buffer = XALLOCAVEC (tree, nargs);
13400 va_start (ap, n);
13401 for (i = 0; i < n; i++)
13402 buffer[i] = va_arg (ap, tree);
13403 va_end (ap);
13404 for (j = skip; j < oldnargs; j++, i++)
13405 buffer[i] = gimple_call_arg (stmt, j);
13407 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13410 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13411 a normal call should be emitted rather than expanding the function
13412 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13414 static tree
13415 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13417 tree dest, size, len, fn, fmt, flag;
13418 const char *fmt_str;
13419 int nargs = gimple_call_num_args (stmt);
13421 /* Verify the required arguments in the original call. */
13422 if (nargs < 4)
13423 return NULL_TREE;
13424 dest = gimple_call_arg (stmt, 0);
13425 if (!validate_arg (dest, POINTER_TYPE))
13426 return NULL_TREE;
13427 flag = gimple_call_arg (stmt, 1);
13428 if (!validate_arg (flag, INTEGER_TYPE))
13429 return NULL_TREE;
13430 size = gimple_call_arg (stmt, 2);
13431 if (!validate_arg (size, INTEGER_TYPE))
13432 return NULL_TREE;
13433 fmt = gimple_call_arg (stmt, 3);
13434 if (!validate_arg (fmt, POINTER_TYPE))
13435 return NULL_TREE;
13437 if (! host_integerp (size, 1))
13438 return NULL_TREE;
13440 len = NULL_TREE;
13442 if (!init_target_chars ())
13443 return NULL_TREE;
13445 /* Check whether the format is a literal string constant. */
13446 fmt_str = c_getstr (fmt);
13447 if (fmt_str != NULL)
13449 /* If the format doesn't contain % args or %%, we know the size. */
13450 if (strchr (fmt_str, target_percent) == 0)
13452 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13453 len = build_int_cstu (size_type_node, strlen (fmt_str));
13455 /* If the format is "%s" and first ... argument is a string literal,
13456 we know the size too. */
13457 else if (fcode == BUILT_IN_SPRINTF_CHK
13458 && strcmp (fmt_str, target_percent_s) == 0)
13460 tree arg;
13462 if (nargs == 5)
13464 arg = gimple_call_arg (stmt, 4);
13465 if (validate_arg (arg, POINTER_TYPE))
13467 len = c_strlen (arg, 1);
13468 if (! len || ! host_integerp (len, 1))
13469 len = NULL_TREE;
13475 if (! integer_all_onesp (size))
13477 if (! len || ! tree_int_cst_lt (len, size))
13478 return NULL_TREE;
13481 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13482 or if format doesn't contain % chars or is "%s". */
13483 if (! integer_zerop (flag))
13485 if (fmt_str == NULL)
13486 return NULL_TREE;
13487 if (strchr (fmt_str, target_percent) != NULL
13488 && strcmp (fmt_str, target_percent_s))
13489 return NULL_TREE;
13492 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13493 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13494 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13495 if (!fn)
13496 return NULL_TREE;
13498 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13501 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13502 a normal call should be emitted rather than expanding the function
13503 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13504 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13505 passed as second argument. */
13507 tree
13508 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13509 enum built_in_function fcode)
13511 tree dest, size, len, fn, fmt, flag;
13512 const char *fmt_str;
13514 /* Verify the required arguments in the original call. */
13515 if (gimple_call_num_args (stmt) < 5)
13516 return NULL_TREE;
13517 dest = gimple_call_arg (stmt, 0);
13518 if (!validate_arg (dest, POINTER_TYPE))
13519 return NULL_TREE;
13520 len = gimple_call_arg (stmt, 1);
13521 if (!validate_arg (len, INTEGER_TYPE))
13522 return NULL_TREE;
13523 flag = gimple_call_arg (stmt, 2);
13524 if (!validate_arg (flag, INTEGER_TYPE))
13525 return NULL_TREE;
13526 size = gimple_call_arg (stmt, 3);
13527 if (!validate_arg (size, INTEGER_TYPE))
13528 return NULL_TREE;
13529 fmt = gimple_call_arg (stmt, 4);
13530 if (!validate_arg (fmt, POINTER_TYPE))
13531 return NULL_TREE;
13533 if (! host_integerp (size, 1))
13534 return NULL_TREE;
13536 if (! integer_all_onesp (size))
13538 if (! host_integerp (len, 1))
13540 /* If LEN is not constant, try MAXLEN too.
13541 For MAXLEN only allow optimizing into non-_ocs function
13542 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13543 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13544 return NULL_TREE;
13546 else
13547 maxlen = len;
13549 if (tree_int_cst_lt (size, maxlen))
13550 return NULL_TREE;
13553 if (!init_target_chars ())
13554 return NULL_TREE;
13556 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13557 or if format doesn't contain % chars or is "%s". */
13558 if (! integer_zerop (flag))
13560 fmt_str = c_getstr (fmt);
13561 if (fmt_str == NULL)
13562 return NULL_TREE;
13563 if (strchr (fmt_str, target_percent) != NULL
13564 && strcmp (fmt_str, target_percent_s))
13565 return NULL_TREE;
13568 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13569 available. */
13570 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13571 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13572 if (!fn)
13573 return NULL_TREE;
13575 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13578 /* Builtins with folding operations that operate on "..." arguments
13579 need special handling; we need to store the arguments in a convenient
13580 data structure before attempting any folding. Fortunately there are
13581 only a few builtins that fall into this category. FNDECL is the
13582 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13583 result of the function call is ignored. */
13585 static tree
13586 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13587 bool ignore ATTRIBUTE_UNUSED)
13589 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13590 tree ret = NULL_TREE;
13592 switch (fcode)
13594 case BUILT_IN_SPRINTF_CHK:
13595 case BUILT_IN_VSPRINTF_CHK:
13596 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13597 break;
13599 case BUILT_IN_SNPRINTF_CHK:
13600 case BUILT_IN_VSNPRINTF_CHK:
13601 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13603 default:
13604 break;
13606 if (ret)
13608 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13609 TREE_NO_WARNING (ret) = 1;
13610 return ret;
13612 return NULL_TREE;
13615 /* A wrapper function for builtin folding that prevents warnings for
13616 "statement without effect" and the like, caused by removing the
13617 call node earlier than the warning is generated. */
13619 tree
13620 fold_call_stmt (gimple stmt, bool ignore)
13622 tree ret = NULL_TREE;
13623 tree fndecl = gimple_call_fndecl (stmt);
13624 location_t loc = gimple_location (stmt);
13625 if (fndecl
13626 && TREE_CODE (fndecl) == FUNCTION_DECL
13627 && DECL_BUILT_IN (fndecl)
13628 && !gimple_call_va_arg_pack_p (stmt))
13630 int nargs = gimple_call_num_args (stmt);
13632 if (avoid_folding_inline_builtin (fndecl))
13633 return NULL_TREE;
13634 /* FIXME: Don't use a list in this interface. */
13635 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13637 tree arglist = NULL_TREE;
13638 int i;
13639 for (i = nargs - 1; i >= 0; i--)
13640 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13641 return targetm.fold_builtin (fndecl, arglist, ignore);
13643 else
13645 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13647 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13648 int i;
13649 for (i = 0; i < nargs; i++)
13650 args[i] = gimple_call_arg (stmt, i);
13651 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13653 if (!ret)
13654 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13655 if (ret)
13657 /* Propagate location information from original call to
13658 expansion of builtin. Otherwise things like
13659 maybe_emit_chk_warning, that operate on the expansion
13660 of a builtin, will use the wrong location information. */
13661 if (gimple_has_location (stmt))
13663 tree realret = ret;
13664 if (TREE_CODE (ret) == NOP_EXPR)
13665 realret = TREE_OPERAND (ret, 0);
13666 if (CAN_HAVE_LOCATION_P (realret)
13667 && !EXPR_HAS_LOCATION (realret))
13668 SET_EXPR_LOCATION (realret, loc);
13669 return realret;
13671 return ret;
13675 return NULL_TREE;
13678 /* Look up the function in built_in_decls that corresponds to DECL
13679 and set ASMSPEC as its user assembler name. DECL must be a
13680 function decl that declares a builtin. */
13682 void
13683 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13685 tree builtin;
13686 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13687 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13688 && asmspec != 0);
13690 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13691 set_user_assembler_name (builtin, asmspec);
13692 switch (DECL_FUNCTION_CODE (decl))
13694 case BUILT_IN_MEMCPY:
13695 init_block_move_fn (asmspec);
13696 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13697 break;
13698 case BUILT_IN_MEMSET:
13699 init_block_clear_fn (asmspec);
13700 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13701 break;
13702 case BUILT_IN_MEMMOVE:
13703 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13704 break;
13705 case BUILT_IN_MEMCMP:
13706 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13707 break;
13708 case BUILT_IN_ABORT:
13709 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13710 break;
13711 case BUILT_IN_FFS:
13712 if (INT_TYPE_SIZE < BITS_PER_WORD)
13714 set_user_assembler_libfunc ("ffs", asmspec);
13715 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13716 MODE_INT, 0), "ffs");
13718 break;
13719 default:
13720 break;