2010-04-19 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / builtins.c
blob5a36cded3caabc3c90a43dc786dbd26cbb7e7257
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 /* Define the names of the builtin function types and codes. */
64 const char *const built_in_class_names[4]
65 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
67 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
68 const char * built_in_names[(int) END_BUILTINS] =
70 #include "builtins.def"
72 #undef DEF_BUILTIN
74 /* Setup an array of _DECL trees, make sure each element is
75 initialized to NULL_TREE. */
76 tree built_in_decls[(int) END_BUILTINS];
77 /* Declarations used when constructing the builtin implicitly in the compiler.
78 It may be NULL_TREE when this is invalid (for instance runtime is not
79 required to implement the function call in all cases). */
80 tree implicit_built_in_decls[(int) END_BUILTINS];
82 static const char *c_getstr (tree);
83 static rtx c_readstr (const char *, enum machine_mode);
84 static int target_char_cast (tree, char *);
85 static rtx get_memory_rtx (tree, tree);
86 static int apply_args_size (void);
87 static int apply_result_size (void);
88 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
89 static rtx result_vector (int, rtx);
90 #endif
91 static void expand_builtin_update_setjmp_buf (rtx);
92 static void expand_builtin_prefetch (tree);
93 static rtx expand_builtin_apply_args (void);
94 static rtx expand_builtin_apply_args_1 (void);
95 static rtx expand_builtin_apply (rtx, rtx, rtx);
96 static void expand_builtin_return (rtx);
97 static enum type_class type_to_class (tree);
98 static rtx expand_builtin_classify_type (tree);
99 static void expand_errno_check (tree, rtx);
100 static rtx expand_builtin_mathfn (tree, rtx, rtx);
101 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
102 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
103 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
104 static rtx expand_builtin_sincos (tree);
105 static rtx expand_builtin_cexpi (tree, rtx, rtx);
106 static rtx expand_builtin_int_roundingfn (tree, rtx);
107 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
108 static rtx expand_builtin_args_info (tree);
109 static rtx expand_builtin_next_arg (void);
110 static rtx expand_builtin_va_start (tree);
111 static rtx expand_builtin_va_end (tree);
112 static rtx expand_builtin_va_copy (tree);
113 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strcmp (tree, rtx);
115 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
116 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_strcpy (tree, rtx);
122 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
123 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_strncpy (tree, rtx);
125 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
126 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
127 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
128 static rtx expand_builtin_bzero (tree);
129 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
130 static rtx expand_builtin_alloca (tree, rtx);
131 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
132 static rtx expand_builtin_frame_address (tree, tree);
133 static tree stabilize_va_list_loc (location_t, tree, int);
134 static rtx expand_builtin_expect (tree, rtx);
135 static tree fold_builtin_constant_p (tree);
136 static tree fold_builtin_expect (location_t, tree, tree);
137 static tree fold_builtin_classify_type (tree);
138 static tree fold_builtin_strlen (location_t, tree, tree);
139 static tree fold_builtin_inf (location_t, tree, int);
140 static tree fold_builtin_nan (tree, tree, int);
141 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
142 static bool validate_arg (const_tree, enum tree_code code);
143 static bool integer_valued_real_p (tree);
144 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
145 static bool readonly_data_expr (tree);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_sqrt (location_t, tree, tree);
149 static tree fold_builtin_cbrt (location_t, tree, tree);
150 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
151 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_cos (location_t, tree, tree, tree);
153 static tree fold_builtin_cosh (location_t, tree, tree, tree);
154 static tree fold_builtin_tan (tree, tree);
155 static tree fold_builtin_trunc (location_t, tree, tree);
156 static tree fold_builtin_floor (location_t, tree, tree);
157 static tree fold_builtin_ceil (location_t, tree, tree);
158 static tree fold_builtin_round (location_t, tree, tree);
159 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
160 static tree fold_builtin_bitop (tree, tree);
161 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
162 static tree fold_builtin_strchr (location_t, tree, tree, tree);
163 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
165 static tree fold_builtin_strcmp (location_t, tree, tree);
166 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
167 static tree fold_builtin_signbit (location_t, tree, tree);
168 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_isascii (location_t, tree);
170 static tree fold_builtin_toascii (location_t, tree);
171 static tree fold_builtin_isdigit (location_t, tree);
172 static tree fold_builtin_fabs (location_t, tree, tree);
173 static tree fold_builtin_abs (location_t, tree, tree);
174 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
177 static tree fold_builtin_0 (location_t, tree, bool);
178 static tree fold_builtin_1 (location_t, tree, tree, bool);
179 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
180 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
181 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
182 static tree fold_builtin_varargs (location_t, tree, tree, bool);
184 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
185 static tree fold_builtin_strstr (location_t, tree, tree, tree);
186 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
187 static tree fold_builtin_strcat (location_t, tree, tree);
188 static tree fold_builtin_strncat (location_t, tree, tree, tree);
189 static tree fold_builtin_strspn (location_t, tree, tree);
190 static tree fold_builtin_strcspn (location_t, tree, tree);
191 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
193 static rtx expand_builtin_object_size (tree);
194 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
195 enum built_in_function);
196 static void maybe_emit_chk_warning (tree, enum built_in_function);
197 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
198 static void maybe_emit_free_warning (tree);
199 static tree fold_builtin_object_size (tree, tree);
200 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
201 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
202 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
203 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
204 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
205 enum built_in_function);
206 static bool init_target_chars (void);
208 static unsigned HOST_WIDE_INT target_newline;
209 static unsigned HOST_WIDE_INT target_percent;
210 static unsigned HOST_WIDE_INT target_c;
211 static unsigned HOST_WIDE_INT target_s;
212 static char target_percent_c[3];
213 static char target_percent_s[3];
214 static char target_percent_s_newline[4];
215 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
216 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
217 static tree do_mpfr_arg2 (tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
219 static tree do_mpfr_arg3 (tree, tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_sincos (tree, tree, tree);
222 static tree do_mpfr_bessel_n (tree, tree, tree,
223 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
224 const REAL_VALUE_TYPE *, bool);
225 static tree do_mpfr_remquo (tree, tree, tree);
226 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* The two prefixes the compiler reserves for built-in entry points.
     A match on either one means NAME is an "internal" builtin name.  */
  static const char *const builtin_prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof builtin_prefixes / sizeof builtin_prefixes[0]; i++)
    {
      const char *prefix = builtin_prefixes[i];
      if (strncmp (name, prefix, strlen (prefix)) == 0)
	return true;
    }
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  /* A built-in must be a FUNCTION_DECL whose DECL_BUILT_IN class is set
     (i.e. DECL_BUILT_IN_CLASS is not NOT_BUILT_IN).  */
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  /* Delegate the actual prefix check to is_builtin_name.  */
  return is_builtin_name (name);
}
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the alignment guaranteed by the innermost reference,
     starting from the most optimistic assumption (MAX_ALIGN) and being
     lowered as offsets with weaker alignment guarantees are found.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Strip component references, collecting the constant bit position
	 and any variable byte offset of the access.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* BITPOS & -BITPOS isolates the lowest set bit, i.e. the largest
	 power of two dividing the bit position.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a PLUS_EXPR chain of offsets, lowering INNER for each term.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  A variable times a constant factor still
		 guarantees the alignment of the factor.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unknown variable offset: only byte alignment is certain.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  /* A CONST_DECL is as aligned as its initializer.  */
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
346 bool
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* Without TER the recorded alignment cannot be relied upon.  */
  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start with the alignment of the pointed-to type.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic to refine the estimate.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant offset is a multiple of it,
	     since the sum can only be as aligned as the offset allows.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree
     (and, unless ONLY_VALUE, the condition has no side effects).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is the length of e2, again subject to
     side-effect constraints on e1.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  /* Reduce SRC to a STRING_CST plus a byte offset, if possible.  */
  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the largest valid index; the final array slot is the
     terminating NUL appended by build_string.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  Returns 0 (NULL)
   when SRC is not such an expression or the offset is unusable.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  /* Reject non-constant or out-of-range offsets; the last valid index
     is TREE_STRING_LENGTH - 1 (the terminating NUL).  */
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
539 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
540 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
542 static rtx
543 c_readstr (const char *str, enum machine_mode mode)
545 HOST_WIDE_INT c[2];
546 HOST_WIDE_INT ch;
547 unsigned int i, j;
549 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
551 c[0] = 0;
552 c[1] = 0;
553 ch = 1;
554 for (i = 0; i < GET_MODE_SIZE (mode); i++)
556 j = i;
557 if (WORDS_BIG_ENDIAN)
558 j = GET_MODE_SIZE (mode) - i - 1;
559 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
560 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
561 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
562 j *= BITS_PER_UNIT;
563 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
565 if (ch)
566 ch = (unsigned char) str[i];
567 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
569 return immed_double_const (c[0], c[1], mode);
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  Returns 1 (failure) when CST is not a small unsigned constant or the
   value does not survive the round trip through the host char.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Truncate to the target's char width.  */
  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  /* Truncate again to the host's char width; if the two differ the
     value cannot be represented on the host.  */
  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
604 static tree
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
612 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

  /* TEM starts as the address of the current frame, either supplied by
     the target or derived from the (hard) frame pointer.  */
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is stored one word past the frame
     address.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  -1 means it has not yet been
   allocated; it is created lazily by the setjmp/longjmp expanders.  */
static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer the target's setjmp receiver pattern, fall back to its
     nonlocal-goto receiver pattern, else emit nothing extra.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Slots 0, 1 and 2 of the buffer, as laid out by
	 expand_builtin_setjmp_setup: frame pointer, label, stack area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
913 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
914 and the address of the save area. */
916 static rtx
917 expand_builtin_nonlocal_goto (tree exp)
919 tree t_label, t_save_area;
920 rtx r_label, r_save_area, r_fp, r_sp, insn;
922 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
923 return NULL_RTX;
925 t_label = CALL_EXPR_ARG (exp, 0);
926 t_save_area = CALL_EXPR_ARG (exp, 1);
928 r_label = expand_normal (t_label);
929 r_label = convert_memory_address (Pmode, r_label);
930 r_save_area = expand_normal (t_save_area);
931 r_save_area = convert_memory_address (Pmode, r_save_area);
932 /* Copy the address of the save location to a register just in case it was based
933 on the frame pointer. */
934 r_save_area = copy_to_reg (r_save_area);
935 r_fp = gen_rtx_MEM (Pmode, r_save_area);
936 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
937 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
939 crtl->has_nonlocal_goto = 1;
941 #ifdef HAVE_nonlocal_goto
942 /* ??? We no longer need to pass the static chain value, afaik. */
943 if (HAVE_nonlocal_goto)
944 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
945 else
946 #endif
948 r_label = copy_to_reg (r_label);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 /* Restore frame pointer for containing function.
954 This sets the actual hard register used for the frame pointer
955 to the location of the function's incoming static chain info.
956 The non-local goto handler will then adjust it to contain the
957 proper value and reload the argument pointer, if needed. */
958 emit_move_insn (hard_frame_pointer_rtx, r_fp);
959 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
961 /* USE of hard_frame_pointer_rtx added for consistency;
962 not clear if really needed. */
963 emit_use (hard_frame_pointer_rtx);
964 emit_use (stack_pointer_rtx);
966 /* If the architecture is using a GP register, we must
967 conservatively assume that the target function makes use of it.
968 The prologue of functions with nonlocal gotos must therefore
969 initialize the GP register to the appropriate value, and we
970 must then make sure that this value is live at the point
971 of the jump. (Note that this doesn't necessarily apply
972 to targets with a nonlocal_goto pattern; they are free
973 to implement it in their own way. Note also that this is
974 a no-op if the GP register is a global invariant.) */
975 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
976 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
977 emit_use (pic_offset_table_rtx);
979 emit_indirect_jump (r_label);
982 /* Search backwards to the jump insn and mark it as a
983 non-local goto. */
984 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
986 if (JUMP_P (insn))
988 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
989 break;
991 else if (CALL_P (insn))
992 break;
995 return const0_rtx;
998 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
999 (not all will be used on all machines) that was passed to __builtin_setjmp.
1000 It updates the stack pointer in that block to correspond to the current
1001 stack pointer. */
1003 static void
1004 expand_builtin_update_setjmp_buf (rtx buf_addr)
1006 enum machine_mode sa_mode = Pmode;
1007 rtx stack_save;
1010 #ifdef HAVE_save_stack_nonlocal
1011 if (HAVE_save_stack_nonlocal)
1012 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1013 #endif
1014 #ifdef STACK_SAVEAREA_MODE
1015 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 #endif
1018 stack_save
1019 = gen_rtx_MEM (sa_mode,
1020 memory_address
1021 (sa_mode,
1022 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1024 #ifdef HAVE_setjmp
1025 if (HAVE_setjmp)
1026 emit_insn (gen_setjmp ());
1027 #endif
1029 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1032 /* Expand a call to __builtin_prefetch. For a target that does not support
1033 data prefetch, evaluate the memory address argument in case it has side
1034 effects. */
1036 static void
1037 expand_builtin_prefetch (tree exp)
1039 tree arg0, arg1, arg2;
1040 int nargs;
1041 rtx op0, op1, op2;
1043 if (!validate_arglist (exp, POINTER_TYPE, 0))
1044 return;
1046 arg0 = CALL_EXPR_ARG (exp, 0);
1048 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1049 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1050 locality). */
1051 nargs = call_expr_nargs (exp);
1052 if (nargs > 1)
1053 arg1 = CALL_EXPR_ARG (exp, 1);
1054 else
1055 arg1 = integer_zero_node;
1056 if (nargs > 2)
1057 arg2 = CALL_EXPR_ARG (exp, 2);
1058 else
1059 arg2 = build_int_cst (NULL_TREE, 3);
1061 /* Argument 0 is an address. */
1062 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1064 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1065 if (TREE_CODE (arg1) != INTEGER_CST)
1067 error ("second argument to %<__builtin_prefetch%> must be a constant");
1068 arg1 = integer_zero_node;
1070 op1 = expand_normal (arg1);
1071 /* Argument 1 must be either zero or one. */
1072 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1074 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1075 " using zero");
1076 op1 = const0_rtx;
1079 /* Argument 2 (locality) must be a compile-time constant int. */
1080 if (TREE_CODE (arg2) != INTEGER_CST)
1082 error ("third argument to %<__builtin_prefetch%> must be a constant");
1083 arg2 = integer_zero_node;
1085 op2 = expand_normal (arg2);
1086 /* Argument 2 must be 0, 1, 2, or 3. */
1087 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1089 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1090 op2 = const0_rtx;
1093 #ifdef HAVE_prefetch
1094 if (HAVE_prefetch)
1096 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1097 (op0,
1098 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1099 || (GET_MODE (op0) != Pmode))
1101 op0 = convert_memory_address (Pmode, op0);
1102 op0 = force_reg (Pmode, op0);
1104 emit_insn (gen_prefetch (op0, op1, op2));
1106 #endif
1108 /* Don't do anything with direct references to volatile memory, but
1109 generate code to handle other side effects. */
1110 if (!MEM_P (op0) && side_effects_p (op0))
1111 emit_insn (op0);
1114 /* Get a MEM rtx for expression EXP which is the address of an operand
1115 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1116 the maximum length of the block of memory that might be accessed or
1117 NULL if unknown. */
1119 static rtx
1120 get_memory_rtx (tree exp, tree len)
1122 tree orig_exp = exp;
1123 rtx addr, mem;
1124 HOST_WIDE_INT off;
1126 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1127 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1128 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1129 exp = TREE_OPERAND (exp, 0);
1131 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1132 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1134 /* Get an expression we can use to find the attributes to assign to MEM.
1135 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1136 we can. First remove any nops. */
1137 while (CONVERT_EXPR_P (exp)
1138 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1139 exp = TREE_OPERAND (exp, 0);
1141 off = 0;
1142 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1143 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1144 && host_integerp (TREE_OPERAND (exp, 1), 0)
1145 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1147 else if (TREE_CODE (exp) == ADDR_EXPR)
1148 exp = TREE_OPERAND (exp, 0);
1149 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1150 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1151 else
1152 exp = NULL;
1154 /* Honor attributes derived from exp, except for the alias set
1155 (as builtin stringops may alias with anything) and the size
1156 (as stringops may access multiple array elements). */
1157 if (exp)
1159 set_mem_attributes (mem, exp, 0);
1161 if (off)
1162 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1164 /* Allow the string and memory builtins to overflow from one
1165 field into another, see http://gcc.gnu.org/PR23561.
1166 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1167 memory accessed by the string or memory builtin will fit
1168 within the field. */
1169 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1171 tree mem_expr = MEM_EXPR (mem);
1172 HOST_WIDE_INT offset = -1, length = -1;
1173 tree inner = exp;
1175 while (TREE_CODE (inner) == ARRAY_REF
1176 || CONVERT_EXPR_P (inner)
1177 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1178 || TREE_CODE (inner) == SAVE_EXPR)
1179 inner = TREE_OPERAND (inner, 0);
1181 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1183 if (MEM_OFFSET (mem)
1184 && CONST_INT_P (MEM_OFFSET (mem)))
1185 offset = INTVAL (MEM_OFFSET (mem));
1187 if (offset >= 0 && len && host_integerp (len, 0))
1188 length = tree_low_cst (len, 0);
1190 while (TREE_CODE (inner) == COMPONENT_REF)
1192 tree field = TREE_OPERAND (inner, 1);
1193 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1194 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1196 /* Bitfields are generally not byte-addressable. */
1197 gcc_assert (!DECL_BIT_FIELD (field)
1198 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1199 % BITS_PER_UNIT) == 0
1200 && host_integerp (DECL_SIZE (field), 0)
1201 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1202 % BITS_PER_UNIT) == 0));
1204 /* If we can prove that the memory starting at XEXP (mem, 0) and
1205 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1206 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1207 fields without DECL_SIZE_UNIT like flexible array members. */
1208 if (length >= 0
1209 && DECL_SIZE_UNIT (field)
1210 && host_integerp (DECL_SIZE_UNIT (field), 0))
1212 HOST_WIDE_INT size
1213 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1214 if (offset <= size
1215 && length <= size
1216 && offset + length <= size)
1217 break;
1220 if (offset >= 0
1221 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1222 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1223 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1224 / BITS_PER_UNIT;
1225 else
1227 offset = -1;
1228 length = -1;
1231 mem_expr = TREE_OPERAND (mem_expr, 0);
1232 inner = TREE_OPERAND (inner, 0);
1235 if (mem_expr == NULL)
1236 offset = -1;
1237 if (mem_expr != MEM_EXPR (mem))
1239 set_mem_expr (mem, mem_expr);
1240 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1243 set_mem_alias_set (mem, 0);
1244 set_mem_size (mem, NULL_RTX);
1247 return mem;
1250 /* Built-in functions to perform an untyped call and return. */
1252 /* For each register that may be used for calling a function, this
1253 gives a mode used to copy the register's value. VOIDmode indicates
1254 the register is not used for calling a function. If the machine
1255 has register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
1257 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1259 /* For each register that may be used for returning values, this gives
1260 a mode used to copy the register's value. VOIDmode indicates the
1261 register is not used for returning values. If the machine has
1262 register windows, this gives only the outbound registers.
1263 INCOMING_REGNO gives the corresponding inbound register. */
1264 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1266 /* Return the size required for the block returned by __builtin_apply_args,
1267 and initialize apply_args_mode. */
1269 static int
1270 apply_args_size (void)
1272 static int size = -1;
1273 int align;
1274 unsigned int regno;
1275 enum machine_mode mode;
1277 /* The values computed by this function never change. */
1278 if (size < 0)
1280 /* The first value is the incoming arg-pointer. */
1281 size = GET_MODE_SIZE (Pmode);
1283 /* The second value is the structure value address unless this is
1284 passed as an "invisible" first argument. */
1285 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1286 size += GET_MODE_SIZE (Pmode);
1288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1289 if (FUNCTION_ARG_REGNO_P (regno))
1291 mode = reg_raw_mode[regno];
1293 gcc_assert (mode != VOIDmode);
1295 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1296 if (size % align != 0)
1297 size = CEIL (size, align) * align;
1298 size += GET_MODE_SIZE (mode);
1299 apply_args_mode[regno] = mode;
1301 else
1303 apply_args_mode[regno] = VOIDmode;
1306 return size;
1309 /* Return the size required for the block returned by __builtin_apply,
1310 and initialize apply_result_mode. */
1312 static int
1313 apply_result_size (void)
1315 static int size = -1;
1316 int align, regno;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1322 size = 0;
1324 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1325 if (FUNCTION_VALUE_REGNO_P (regno))
1327 mode = reg_raw_mode[regno];
1329 gcc_assert (mode != VOIDmode);
1331 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1332 if (size % align != 0)
1333 size = CEIL (size, align) * align;
1334 size += GET_MODE_SIZE (mode);
1335 apply_result_mode[regno] = mode;
1337 else
1338 apply_result_mode[regno] = VOIDmode;
1340 /* Allow targets that use untyped_call and untyped_return to override
1341 the size so that machine-specific information can be stored here. */
1342 #ifdef APPLY_RESULT_SIZE
1343 size = APPLY_RESULT_SIZE;
1344 #endif
1346 return size;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per live result
   register, moving between the register and its slot in RESULT.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, the destination is the inbound register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1380 /* Save the state required to perform an untyped call with the same
1381 arguments as were passed to the current function. */
1383 static rtx
1384 expand_builtin_apply_args_1 (void)
1386 rtx registers, tem;
1387 int size, align, regno;
1388 enum machine_mode mode;
1389 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1391 /* Create a block where the arg-pointer, structure value address,
1392 and argument registers can be saved. */
1393 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1395 /* Walk past the arg-pointer and structure value address. */
1396 size = GET_MODE_SIZE (Pmode);
1397 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1398 size += GET_MODE_SIZE (Pmode);
1400 /* Save each register used in calling a function to the block. */
1401 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1402 if ((mode = apply_args_mode[regno]) != VOIDmode)
1404 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1405 if (size % align != 0)
1406 size = CEIL (size, align) * align;
1408 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1410 emit_move_insn (adjust_address (registers, mode, size), tem);
1411 size += GET_MODE_SIZE (mode);
1414 /* Save the arg pointer to the block. */
1415 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1416 #ifdef STACK_GROWS_DOWNWARD
1417 /* We need the pointer as the caller actually passed them to us, not
1418 as we might have pretended they were passed. Make sure it's a valid
1419 operand, as emit_move_insn isn't expected to handle a PLUS. */
1421 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1422 NULL_RTX);
1423 #endif
1424 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1426 size = GET_MODE_SIZE (Pmode);
1428 /* Save the structure value address unless this is passed as an
1429 "invisible" first argument. */
1430 if (struct_incoming_value)
1432 emit_move_insn (adjust_address (registers, Pmode, size),
1433 copy_to_reg (struct_incoming_value));
1434 size += GET_MODE_SIZE (Pmode);
1437 /* Return the address of the block. */
1438 return copy_addr_to_reg (XEXP (registers, 0));
1441 /* __builtin_apply_args returns block of memory allocated on
1442 the stack into which is stored the arg pointer, structure
1443 value address, static chain, and all the registers that might
1444 possibly be used in performing a function call. The code is
1445 moved to the start of the function so the incoming values are
1446 saved. */
1448 static rtx
1449 expand_builtin_apply_args (void)
1451 /* Don't do __builtin_apply_args more than once in a function.
1452 Save the result of the first call and reuse it. */
1453 if (apply_args_value != 0)
1454 return apply_args_value;
1456 /* When this function is called, it means that registers must be
1457 saved on entry to this function. So we migrate the
1458 call to the first insn of this function. */
1459 rtx temp;
1460 rtx seq;
1462 start_sequence ();
1463 temp = expand_builtin_apply_args_1 ();
1464 seq = get_insns ();
1465 end_sequence ();
1467 apply_args_value = temp;
1469 /* Put the insns after the NOTE that starts the function.
1470 If this is inside a start_sequence, make the outer-level insn
1471 chain current, so the code is placed at the start of the
1472 function. If internal_arg_pointer is a non-virtual pseudo,
1473 it needs to be placed after the function that initializes
1474 that pseudo. */
1475 push_topmost_sequence ();
1476 if (REG_P (crtl->args.internal_arg_pointer)
1477 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1478 emit_insn_before (seq, parm_birth_insn);
1479 else
1480 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1481 pop_topmost_sequence ();
1482 return temp;
1486 /* Perform an untyped call and save the state required to perform an
1487 untyped return of whatever value was returned by the given function. */
1489 static rtx
1490 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1492 int size, align, regno;
1493 enum machine_mode mode;
1494 rtx incoming_args, result, reg, dest, src, call_insn;
1495 rtx old_stack_level = 0;
1496 rtx call_fusage = 0;
1497 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1499 arguments = convert_memory_address (Pmode, arguments);
1501 /* Create a block where the return registers can be saved. */
1502 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1504 /* Fetch the arg pointer from the ARGUMENTS block. */
1505 incoming_args = gen_reg_rtx (Pmode);
1506 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1507 #ifndef STACK_GROWS_DOWNWARD
1508 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1509 incoming_args, 0, OPTAB_LIB_WIDEN);
1510 #endif
1512 /* Push a new argument block and copy the arguments. Do not allow
1513 the (potential) memcpy call below to interfere with our stack
1514 manipulations. */
1515 do_pending_stack_adjust ();
1516 NO_DEFER_POP;
1518 /* Save the stack with nonlocal if available. */
1519 #ifdef HAVE_save_stack_nonlocal
1520 if (HAVE_save_stack_nonlocal)
1521 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1522 else
1523 #endif
1524 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1526 /* Allocate a block of memory onto the stack and copy the memory
1527 arguments to the outgoing arguments address. */
1528 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1530 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1531 may have already set current_function_calls_alloca to true.
1532 current_function_calls_alloca won't be set if argsize is zero,
1533 so we have to guarantee need_drap is true here. */
1534 if (SUPPORTS_STACK_ALIGNMENT)
1535 crtl->need_drap = true;
1537 dest = virtual_outgoing_args_rtx;
1538 #ifndef STACK_GROWS_DOWNWARD
1539 if (CONST_INT_P (argsize))
1540 dest = plus_constant (dest, -INTVAL (argsize));
1541 else
1542 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1543 #endif
1544 dest = gen_rtx_MEM (BLKmode, dest);
1545 set_mem_align (dest, PARM_BOUNDARY);
1546 src = gen_rtx_MEM (BLKmode, incoming_args);
1547 set_mem_align (src, PARM_BOUNDARY);
1548 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1550 /* Refer to the argument block. */
1551 apply_args_size ();
1552 arguments = gen_rtx_MEM (BLKmode, arguments);
1553 set_mem_align (arguments, PARM_BOUNDARY);
1555 /* Walk past the arg-pointer and structure value address. */
1556 size = GET_MODE_SIZE (Pmode);
1557 if (struct_value)
1558 size += GET_MODE_SIZE (Pmode);
1560 /* Restore each of the registers previously saved. Make USE insns
1561 for each of these registers for use in making the call. */
1562 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1563 if ((mode = apply_args_mode[regno]) != VOIDmode)
1565 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1566 if (size % align != 0)
1567 size = CEIL (size, align) * align;
1568 reg = gen_rtx_REG (mode, regno);
1569 emit_move_insn (reg, adjust_address (arguments, mode, size));
1570 use_reg (&call_fusage, reg);
1571 size += GET_MODE_SIZE (mode);
1574 /* Restore the structure value address unless this is passed as an
1575 "invisible" first argument. */
1576 size = GET_MODE_SIZE (Pmode);
1577 if (struct_value)
1579 rtx value = gen_reg_rtx (Pmode);
1580 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1581 emit_move_insn (struct_value, value);
1582 if (REG_P (struct_value))
1583 use_reg (&call_fusage, struct_value);
1584 size += GET_MODE_SIZE (Pmode);
1587 /* All arguments and registers used for the call are set up by now! */
1588 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1590 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1591 and we don't want to load it into a register as an optimization,
1592 because prepare_call_address already did it if it should be done. */
1593 if (GET_CODE (function) != SYMBOL_REF)
1594 function = memory_address (FUNCTION_MODE, function);
1596 /* Generate the actual call instruction and save the return value. */
1597 #ifdef HAVE_untyped_call
1598 if (HAVE_untyped_call)
1599 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1600 result, result_vector (1, result)));
1601 else
1602 #endif
1603 #ifdef HAVE_call_value
1604 if (HAVE_call_value)
1606 rtx valreg = 0;
1608 /* Locate the unique return register. It is not possible to
1609 express a call that sets more than one return register using
1610 call_value; use untyped_call for that. In fact, untyped_call
1611 only needs to save the return registers in the given block. */
1612 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1613 if ((mode = apply_result_mode[regno]) != VOIDmode)
1615 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1617 valreg = gen_rtx_REG (mode, regno);
1620 emit_call_insn (GEN_CALL_VALUE (valreg,
1621 gen_rtx_MEM (FUNCTION_MODE, function),
1622 const0_rtx, NULL_RTX, const0_rtx));
1624 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1626 else
1627 #endif
1628 gcc_unreachable ();
1630 /* Find the CALL insn we just emitted, and attach the register usage
1631 information. */
1632 call_insn = last_call_insn ();
1633 add_function_usage_to (call_insn, call_fusage);
1635 /* Restore the stack. */
1636 #ifdef HAVE_save_stack_nonlocal
1637 if (HAVE_save_stack_nonlocal)
1638 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1639 else
1640 #endif
1641 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1643 OK_DEFER_POP;
1645 /* Return the address of the result block. */
1646 result = copy_addr_to_reg (XEXP (result, 0));
1647 return convert_memory_address (ptr_mode, result);
1650 /* Perform an untyped return. */
1652 static void
1653 expand_builtin_return (rtx result)
1655 int size, align, regno;
1656 enum machine_mode mode;
1657 rtx reg;
1658 rtx call_fusage = 0;
1660 result = convert_memory_address (Pmode, result);
1662 apply_result_size ();
1663 result = gen_rtx_MEM (BLKmode, result);
1665 #ifdef HAVE_untyped_return
1666 if (HAVE_untyped_return)
1668 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1669 emit_barrier ();
1670 return;
1672 #endif
1674 /* Restore the return value and note that each value is used. */
1675 size = 0;
1676 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1677 if ((mode = apply_result_mode[regno]) != VOIDmode)
1679 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1680 if (size % align != 0)
1681 size = CEIL (size, align) * align;
1682 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1683 emit_move_insn (reg, adjust_address (result, mode, size));
1685 push_to_sequence (call_fusage);
1686 emit_use (reg);
1687 call_fusage = get_insns ();
1688 end_sequence ();
1689 size += GET_MODE_SIZE (mode);
1692 /* Put the USE insns before the return. */
1693 emit_insn (call_fusage);
1695 /* Return whatever values was restored by jumping directly to the end
1696 of the function. */
1697 expand_naked_return ();
1700 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1702 static enum type_class
1703 type_to_class (tree type)
1705 switch (TREE_CODE (type))
1707 case VOID_TYPE: return void_type_class;
1708 case INTEGER_TYPE: return integer_type_class;
1709 case ENUMERAL_TYPE: return enumeral_type_class;
1710 case BOOLEAN_TYPE: return boolean_type_class;
1711 case POINTER_TYPE: return pointer_type_class;
1712 case REFERENCE_TYPE: return reference_type_class;
1713 case OFFSET_TYPE: return offset_type_class;
1714 case REAL_TYPE: return real_type_class;
1715 case COMPLEX_TYPE: return complex_type_class;
1716 case FUNCTION_TYPE: return function_type_class;
1717 case METHOD_TYPE: return method_type_class;
1718 case RECORD_TYPE: return record_type_class;
1719 case UNION_TYPE:
1720 case QUAL_UNION_TYPE: return union_type_class;
1721 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1722 ? string_type_class : array_type_class);
1723 case LANG_TYPE: return lang_type_class;
1724 default: return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1752 /* Return mathematic function equivalent to FN but operating directly
1753 on TYPE, if available. If IMPLICIT is true find the function in
1754 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1755 can't do the conversion, return zero. */
1757 static tree
1758 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1760 tree const *const fn_arr
1761 = implicit ? implicit_built_in_decls : built_in_decls;
1762 enum built_in_function fcode, fcodef, fcodel;
1764 switch (fn)
1766 CASE_MATHFN (BUILT_IN_ACOS)
1767 CASE_MATHFN (BUILT_IN_ACOSH)
1768 CASE_MATHFN (BUILT_IN_ASIN)
1769 CASE_MATHFN (BUILT_IN_ASINH)
1770 CASE_MATHFN (BUILT_IN_ATAN)
1771 CASE_MATHFN (BUILT_IN_ATAN2)
1772 CASE_MATHFN (BUILT_IN_ATANH)
1773 CASE_MATHFN (BUILT_IN_CBRT)
1774 CASE_MATHFN (BUILT_IN_CEIL)
1775 CASE_MATHFN (BUILT_IN_CEXPI)
1776 CASE_MATHFN (BUILT_IN_COPYSIGN)
1777 CASE_MATHFN (BUILT_IN_COS)
1778 CASE_MATHFN (BUILT_IN_COSH)
1779 CASE_MATHFN (BUILT_IN_DREM)
1780 CASE_MATHFN (BUILT_IN_ERF)
1781 CASE_MATHFN (BUILT_IN_ERFC)
1782 CASE_MATHFN (BUILT_IN_EXP)
1783 CASE_MATHFN (BUILT_IN_EXP10)
1784 CASE_MATHFN (BUILT_IN_EXP2)
1785 CASE_MATHFN (BUILT_IN_EXPM1)
1786 CASE_MATHFN (BUILT_IN_FABS)
1787 CASE_MATHFN (BUILT_IN_FDIM)
1788 CASE_MATHFN (BUILT_IN_FLOOR)
1789 CASE_MATHFN (BUILT_IN_FMA)
1790 CASE_MATHFN (BUILT_IN_FMAX)
1791 CASE_MATHFN (BUILT_IN_FMIN)
1792 CASE_MATHFN (BUILT_IN_FMOD)
1793 CASE_MATHFN (BUILT_IN_FREXP)
1794 CASE_MATHFN (BUILT_IN_GAMMA)
1795 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1796 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1797 CASE_MATHFN (BUILT_IN_HYPOT)
1798 CASE_MATHFN (BUILT_IN_ILOGB)
1799 CASE_MATHFN (BUILT_IN_INF)
1800 CASE_MATHFN (BUILT_IN_ISINF)
1801 CASE_MATHFN (BUILT_IN_J0)
1802 CASE_MATHFN (BUILT_IN_J1)
1803 CASE_MATHFN (BUILT_IN_JN)
1804 CASE_MATHFN (BUILT_IN_LCEIL)
1805 CASE_MATHFN (BUILT_IN_LDEXP)
1806 CASE_MATHFN (BUILT_IN_LFLOOR)
1807 CASE_MATHFN (BUILT_IN_LGAMMA)
1808 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1809 CASE_MATHFN (BUILT_IN_LLCEIL)
1810 CASE_MATHFN (BUILT_IN_LLFLOOR)
1811 CASE_MATHFN (BUILT_IN_LLRINT)
1812 CASE_MATHFN (BUILT_IN_LLROUND)
1813 CASE_MATHFN (BUILT_IN_LOG)
1814 CASE_MATHFN (BUILT_IN_LOG10)
1815 CASE_MATHFN (BUILT_IN_LOG1P)
1816 CASE_MATHFN (BUILT_IN_LOG2)
1817 CASE_MATHFN (BUILT_IN_LOGB)
1818 CASE_MATHFN (BUILT_IN_LRINT)
1819 CASE_MATHFN (BUILT_IN_LROUND)
1820 CASE_MATHFN (BUILT_IN_MODF)
1821 CASE_MATHFN (BUILT_IN_NAN)
1822 CASE_MATHFN (BUILT_IN_NANS)
1823 CASE_MATHFN (BUILT_IN_NEARBYINT)
1824 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1825 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1826 CASE_MATHFN (BUILT_IN_POW)
1827 CASE_MATHFN (BUILT_IN_POWI)
1828 CASE_MATHFN (BUILT_IN_POW10)
1829 CASE_MATHFN (BUILT_IN_REMAINDER)
1830 CASE_MATHFN (BUILT_IN_REMQUO)
1831 CASE_MATHFN (BUILT_IN_RINT)
1832 CASE_MATHFN (BUILT_IN_ROUND)
1833 CASE_MATHFN (BUILT_IN_SCALB)
1834 CASE_MATHFN (BUILT_IN_SCALBLN)
1835 CASE_MATHFN (BUILT_IN_SCALBN)
1836 CASE_MATHFN (BUILT_IN_SIGNBIT)
1837 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1838 CASE_MATHFN (BUILT_IN_SIN)
1839 CASE_MATHFN (BUILT_IN_SINCOS)
1840 CASE_MATHFN (BUILT_IN_SINH)
1841 CASE_MATHFN (BUILT_IN_SQRT)
1842 CASE_MATHFN (BUILT_IN_TAN)
1843 CASE_MATHFN (BUILT_IN_TANH)
1844 CASE_MATHFN (BUILT_IN_TGAMMA)
1845 CASE_MATHFN (BUILT_IN_TRUNC)
1846 CASE_MATHFN (BUILT_IN_Y0)
1847 CASE_MATHFN (BUILT_IN_Y1)
1848 CASE_MATHFN (BUILT_IN_YN)
1850 default:
1851 return NULL_TREE;
1854 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1855 return fn_arr[fcode];
1856 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1857 return fn_arr[fcodef];
1858 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1859 return fn_arr[fcodel];
1860 else
1861 return NULL_TREE;
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     is only true for non-NaN values, so the jump to LAB (skipping the
     errno store) is taken in the common case.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	= gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its optab, and note which builtins may need an
     errno check (only meaningful with -fmath-errno, see below).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt of a provably non-negative argument cannot set EDOM.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without -fmath-errno, or on targets whose FP format has no NaNs,
     no errno bookkeeping is needed.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* The scalbn/scalbln/ldexp family takes an integer second argument;
     everything else handled here takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fallthrough.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb semantics only match the optab for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* The sincos expander produces both values; place the one
	     this builtin wants in TARGET and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = 0;
  enum machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode)->insn_code;
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Remember where we are so the insns can be deleted if the
	 pattern-based expansion ultimately fails.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;
      /* Make a suitable register to place result in.  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
	  || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
	return target;
      /* Expansion failed: undo the emitted insns and restore the
	 original argument before falling back to a library call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  On success the call evaluates to const0_rtx (sincos returns
   void; the results are stored through the two pointer arguments).  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* op1/op2 are the memory locations *sinp and *cosp.  */
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried in order: a sincos optab, a libcall to
   sincos (when the target has it), and finally a libcall to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries to receive the sin and cos results, passed
	 to sincos by address.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cexpi(x) = cos(x) + i*sin(x),
     so OP2 (cos) is the real part and OP1 (sin) the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   LOC is the source location to attach to the resulting call.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Shorthand used when no meaningful location is available.  */
#define build_call_nofold(...) \
  build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2797 /* Return the number of multiplications required to calculate
2798 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2799 subroutine of powi_cost. CACHE is an array indicating
2800 which exponents have already been calculated. */
2802 static int
2803 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2805 /* If we've already calculated this exponent, then this evaluation
2806 doesn't require any additional multiplications. */
2807 if (cache[n])
2808 return 0;
2810 cache[n] = true;
2811 return powi_lookup_cost (n - powi_table[n], cache)
2812 + powi_lookup_cost (powi_table[n], cache) + 1;
2815 /* Return the number of multiplications required to calculate
2816 powi(x,n) for an arbitrary x, given the exponent N. This
2817 function needs to be kept in sync with expand_powi below. */
2819 static int
2820 powi_cost (HOST_WIDE_INT n)
2822 bool cache[POWI_TABLE_SIZE];
2823 unsigned HOST_WIDE_INT digit;
2824 unsigned HOST_WIDE_INT val;
2825 int result;
2827 if (n == 0)
2828 return 0;
2830 /* Ignore the reciprocal when calculating the cost. */
2831 val = (n < 0) ? -n : n;
2833 /* Initialize the exponent cache. */
2834 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2835 cache[1] = true;
2837 result = 0;
2839 while (val >= POWI_TABLE_SIZE)
2841 if (val & 1)
2843 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2844 result += powi_lookup_cost (digit, cache)
2845 + POWI_WINDOW_SIZE + 1;
2846 val >>= POWI_WINDOW_SIZE;
2848 else
2850 val >>= 1;
2851 result++;
2855 return result + powi_lookup_cost (val, cache);
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponents: reuse a cached power if available, otherwise
	 split per the optimal power tree: x**n = x**(n-j) * x**j.  */
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd large exponent: strip a POWI_WINDOW_SIZE-bit window.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even large exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
   floating point operand in mode MODE, and N is the exponent.  This
   function needs to be kept in sync with powi_cost above.  */

static rtx
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
{
  rtx cache[POWI_TABLE_SIZE];
  rtx result;

  /* powi(x,0) is 1.0 regardless of x.  */
  if (n == 0)
    return CONST1_RTX (mode);

  /* Seed the memoization cache with x**1 = x.  */
  memset (cache, 0, sizeof (cache));
  cache[1] = x;

  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);

  /* If the original exponent was negative, reciprocate the result.  */
  if (n < 0)
    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
			   result, NULL_RTX, 0, OPTAB_LIB_WIDEN);

  return result;
}
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Non-constant exponent: punt to the generic two-operand expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  Exponents -1, 0, 1 and 2 are always exact;
     larger ones require -funsafe-math-optimizations and a cheap-enough
     multiplication chain.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 is used more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      /* Even the c == 0.5 case cannot be done unconditionally
		 when we need to preserve signed zeros, as
		 pow (-0, 0.5) is +0, while sqrt(-0) is -0.  */
	      || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
	      /* For c == 1.5 we can assume that x * sqrt (x) is always
		 smaller than pow (x, 1.5) if sqrt will not be expanded
		 as a call.  */
	      || (n == 3
		  && (optab_handler (sqrt_optab, mode)->insn_code
		      != CODE_FOR_nothing))))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      /* Verify that c really equals n/3 after rounding back, so the
	 transformation is exact in MODE.  */
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 accounts for the n%3 == 2 remainder.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH test makes sure the exponent fits in
	 a HOST_WIDE_INT (positive or sign-extended negative).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode; try
	 successively wider modes until a strlen pattern is found.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source computation in front of the strlen insn.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3242 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3243 bytes from constant string DATA + OFFSET and return it as target
3244 constant. */
3246 static rtx
3247 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3248 enum machine_mode mode)
3250 const char *str = (const char *) data;
3252 gcc_assert (offset >= 0
3253 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3254 <= strlen (str) + 1));
3256 return c_readstr (str + offset, mode);
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pull profile-derived alignment/size hints for this stringop,
	 if we are expanding a gimple statement.  */
      if (currently_expanding_gimple_stmt)
	stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3340 /* Expand a call EXP to the mempcpy builtin.
3341 Return NULL_RTX if we failed; the caller should emit a normal call,
3342 otherwise try to get the result in TARGET, if convenient (and in
3343 mode MODE if that's convenient). If ENDP is 0 return the
3344 destination pointer, if ENDP is 1 return the end pointer ala
3345 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3346 stpcpy. */
3348 static rtx
3349 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3351 if (!validate_arglist (exp,
3352 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3353 return NULL_RTX;
3354 else
3356 tree dest = CALL_EXPR_ARG (exp, 0);
3357 tree src = CALL_EXPR_ARG (exp, 1);
3358 tree len = CALL_EXPR_ARG (exp, 2);
3359 return expand_builtin_mempcpy_args (dest, src, len,
3360 target, mode, /*endp=*/ 1);
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold (fn, 3, dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise move by pieces if the (constant) length permits;
	 store_by_pieces/move_by_pieces return the ENDP-adjusted
	 destination address.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3441 #ifndef HAVE_movstr
3442 # define HAVE_movstr 0
3443 # define CODE_FOR_movstr CODE_FOR_nothing
3444 #endif
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style: the result is the original destination pointer,
	 so capture it before the insn advances anything.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3508 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3509 NULL_RTX if we failed the caller should emit a normal call, otherwise
3510 try to get the result in TARGET, if convenient (and in mode MODE if that's
3511 convenient). */
3513 static rtx
3514 expand_builtin_strcpy (tree exp, rtx target)
3516 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3518 tree dest = CALL_EXPR_ARG (exp, 0);
3519 tree src = CALL_EXPR_ARG (exp, 1);
3520 return expand_builtin_strcpy_args (dest, src, target);
3522 return NULL_RTX;
3525 /* Helper function to do the actual work for expand_builtin_strcpy. The
3526 arguments to the builtin_strcpy call DEST and SRC are broken out
3527 so that this can also be called without constructing an actual CALL_EXPR.
3528 The other arguments and return value are the same as for
3529 expand_builtin_strcpy. */
3531 static rtx
3532 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3534 return expand_movstr (dest, src, target, /*endp=*/0);
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold (fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* stpcpy copies strlen (src) + 1 bytes and returns dst + strlen.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Expand as a plain strcpy and compute the return value
		 as DST + LEN ourselves.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3613 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3614 bytes from constant string DATA + OFFSET and return it as target
3615 constant. */
3618 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3619 enum machine_mode mode)
3621 const char *str = (const char *) data;
3623 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3624 return const0_rtx;
3626 return c_readstr (str + offset, mode);
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      /* SLEN becomes strlen (src) + 1, the number of bytes the copy
	 really transfers before padding starts.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3680 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3681 bytes from constant string DATA + OFFSET and return it as target
3682 constant. */
3685 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3686 enum machine_mode mode)
3688 const char *c = (const char *) data;
3689 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3691 memset (p, *c, GET_MODE_SIZE (mode));
3693 return c_readstr (p, mode);
3696 /* Callback routine for store_by_pieces. Return the RTL of a register
3697 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3698 char value given in the RTL register data. For example, if mode is
3699 4 bytes wide, return the RTL for 0x01010101*data. */
3701 static rtx
3702 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3703 enum machine_mode mode)
3705 rtx target, coeff;
3706 size_t size;
3707 char *p;
3709 size = GET_MODE_SIZE (mode);
3710 if (size == 1)
3711 return (rtx) data;
3713 p = XALLOCAVEC (char, size);
3714 memset (p, 1, size);
3715 coeff = c_readstr (p, mode);
3717 target = convert_to_mode (mode, (rtx) data, 1);
3718 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3719 return force_reg (mode, target);
3722 /* Expand expression EXP, which is a call to the memset builtin. Return
3723 NULL_RTX if we failed the caller should emit a normal call, otherwise
3724 try to get the result in TARGET, if convenient (and in mode MODE if that's
3725 convenient). */
3727 static rtx
3728 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3730 if (!validate_arglist (exp,
3731 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3732 return NULL_RTX;
3733 else
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree val = CALL_EXPR_ARG (exp, 1);
3737 tree len = CALL_EXPR_ARG (exp, 2);
3738 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pull profile-derived alignment/size hints for this stringop.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Non-zero constant byte: store by pieces or via a setmem insn.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero byte: this is a block clear.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: rebuild the original memset/bzero call
     (the arguments were stabilized above, so re-use is safe).  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3871 /* Expand expression EXP, which is a call to the bzero builtin. Return
3872 NULL_RTX if we failed the caller should emit a normal call. */
3874 static rtx
3875 expand_builtin_bzero (tree exp)
3877 tree dest, size;
3878 location_t loc = EXPR_LOCATION (exp);
3880 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3881 return NULL_RTX;
3883 dest = CALL_EXPR_ARG (exp, 0);
3884 size = CALL_EXPR_ARG (exp, 1);
3886 /* New argument list transforming bzero(ptr x, int y) to
3887 memset(ptr x, int 0, size_t y). This is done this way
3888 so that if it isn't expanded inline, we fallback to
3889 calling bzero instead of memset. */
3891 return expand_builtin_memset_args (dest, integer_zero_node,
3892 fold_convert_loc (loc, sizetype, size),
3893 const0_rtx, VOIDmode, exp);
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the cmpmem pattern; fall back to cmpstrn.  The unguarded
       `else return NULL_RTX' below pairs with whichever `if' survives
       preprocessing, so with neither pattern defined the surrounding
       #if already removed this whole body.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The pattern may have declined to match (returned NULL); in that
       case emit a plain library call to memcmp.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* +1 below accounts for the terminating NUL so the compare
	     covers the full strings.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* +1 covers the terminating NUL.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4304 /* __builtin_args_info (N) returns word N of the arg space info
4305 for the current function. The number and meanings of words
4306 is controlled by the definition of CUMULATIVE_ARGS. */
4308 static rtx
4309 expand_builtin_args_info (tree exp)
4311 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4312 int *word_ptr = (int *) &crtl->args.info;
4314 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4316 if (call_expr_nargs (exp) != 0)
4318 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4319 error ("argument of %<__builtin_args_info%> must be constant");
4320 else
4322 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4324 if (wordnum < 0 || wordnum >= nwords)
4325 error ("argument of %<__builtin_args_info%> out of range");
4326 else
4327 return GEN_INT (word_ptr[wordnum]);
4330 else
4331 error ("missing argument in %<__builtin_args_info%>");
4333 return const0_rtx;
4336 /* Expand a call to __builtin_next_arg. */
4338 static rtx
4339 expand_builtin_next_arg (void)
4341 /* Checking arguments is already done in fold_builtin_next_arg
4342 that must be called before this function. */
4343 return expand_binop (ptr_mode, add_optab,
4344 crtl->args.internal_arg_pointer,
4345 crtl->args.arg_offset_rtx,
4346 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  NEEDS_LVALUE is nonzero when the caller
   will write through VALIST.  Returns the stabilized expression.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* An rvalue use with no side effects can be returned as-is.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Evaluate the (possibly address-taken) expression once, then
	 dereference so callers see an object of the va_list type.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
/* The "standard" type of va_list is va_list_type_node.  Returns
   va_list_type_node when TYPE is (a reference to, or decayed form of)
   the va_list type, and NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection so we look at the va_list object
     itself rather than a reference/pointer to it.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4447 /* The "standard" implementation of va_start: just assign `nextarg' to
4448 the variable. */
4450 void
4451 std_expand_builtin_va_start (tree valist, rtx nextarg)
4453 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4454 convert_move (va_r, nextarg, 0);
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   diagnostics are emitted for malformed calls.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses a bad second argument; a nonzero
     return means an error was reported.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start its own way if it has a hook.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.
   Gimplified statements are appended to PRE_P/POST_P; the returned tree
   is the dereferenced argument value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary, emitted
	 as two separate assignments so each stays gimple.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments ADDR holds a pointer-to-pointer; add the
     extra dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4587 /* Build an indirect-ref expression over the given TREE, which represents a
4588 piece of a va_arg() expansion. */
4589 tree
4590 build_va_arg_indirect_ref (tree addr)
4592 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4594 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4595 mf_mark (addr);
4597 return addr;
4600 /* Return a dummy expression of type TYPE in order to keep going after an
4601 error. */
4603 static tree
4604 dummy_object (tree type)
4606 tree t = build_int_cst (build_pointer_type (type), 0);
4607 return build1 (INDIRECT_REF, type, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The help note is issued at most once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4703 /* Expand EXP, a call to __builtin_va_end. */
4705 static rtx
4706 expand_builtin_va_end (tree exp)
4708 tree valist = CALL_EXPR_ARG (exp, 0);
4710 /* Evaluate for side effects, if needed. I hate macros that don't
4711 do that. */
4712 if (TREE_SIDE_EFFECTS (valist))
4713 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4715 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination is written (lvalue), source only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar/record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the backing storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call.  Error paths return const0_rtx after diagnosing.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, ensure the value is usable as an
	 operand by copying it into a register when needed.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4817 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4818 we failed and the caller should emit a normal call, otherwise try to get
4819 the result in TARGET, if convenient. */
4821 static rtx
4822 expand_builtin_alloca (tree exp, rtx target)
4824 rtx op0;
4825 rtx result;
4827 /* Emit normal call if marked not-inlineable. */
4828 if (CALL_CANNOT_INLINE_P (exp))
4829 return NULL_RTX;
4831 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4832 return NULL_RTX;
4834 /* Compute the argument. */
4835 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4837 /* Allocate the desired space. */
4838 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4839 result = convert_memory_address (ptr_mode, result);
4841 return result;
4844 /* Expand a call to a bswap builtin with argument ARG0. MODE
4845 is the mode to expand with. */
4847 static rtx
4848 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4850 enum machine_mode mode;
4851 tree arg;
4852 rtx op0;
4854 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4855 return NULL_RTX;
4857 arg = CALL_EXPR_ARG (exp, 0);
4858 mode = TYPE_MODE (TREE_TYPE (arg));
4859 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4861 target = expand_unop (mode, bswap_optab, op0, target, 1);
4863 gcc_assert (target);
4865 return convert_to_mode (mode, target, 0);
4868 /* Expand a call to a unary builtin in EXP.
4869 Return NULL_RTX if a normal call should be emitted rather than expanding the
4870 function in-line. If convenient, the result should be placed in TARGET.
4871 SUBTARGET may be used as the target for computing one of EXP's operands. */
4873 static rtx
4874 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4875 rtx subtarget, optab op_optab)
4877 rtx op0;
4879 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4880 return NULL_RTX;
4882 /* Compute the argument. */
4883 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4884 VOIDmode, EXPAND_NORMAL);
4885 /* Compute op, into TARGET if possible.
4886 Set TARGET to wherever the result comes back. */
4887 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4888 op_optab, op0, target, 1);
4889 gcc_assert (target);
4891 return convert_to_mode (target_mode, target, 0);
4894 /* Expand a call to __builtin_expect. We just return our argument
4895 as the builtin_expect semantic should've been already executed by
4896 tree branch prediction pass. */
4898 static rtx
4899 expand_builtin_expect (tree exp, rtx target)
4901 tree arg;
4903 if (call_expr_nargs (exp) < 2)
4904 return const0_rtx;
4905 arg = CALL_EXPR_ARG (exp, 0);
4907 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4908 /* When guessing was done, the hints should be already stripped away. */
4909 gcc_assert (!flag_guess_branch_prob
4910 || optimize == 0 || errorcount || sorrycount);
4911 return target;
/* Emit code that stops execution: the target's trap instruction when it
   has one, otherwise a call to abort.  A barrier follows since control
   does not continue past this point.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4937 /* Expand EXP, a call to fabs, fabsf or fabsl.
4938 Return NULL_RTX if a normal call should be emitted rather than expanding
4939 the function inline. If convenient, the result should be placed
4940 in TARGET. SUBTARGET may be used as the target for computing
4941 the operand. */
4943 static rtx
4944 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4946 enum machine_mode mode;
4947 tree arg;
4948 rtx op0;
4950 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4951 return NULL_RTX;
4953 arg = CALL_EXPR_ARG (exp, 0);
4954 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4955 mode = TYPE_MODE (TREE_TYPE (arg));
4956 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4957 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4960 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4961 Return NULL is a normal call should be emitted rather than expanding the
4962 function inline. If convenient, the result should be placed in TARGET.
4963 SUBTARGET may be used as the target for computing the operand. */
4965 static rtx
4966 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4968 rtx op0, op1;
4969 tree arg;
4971 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4972 return NULL_RTX;
4974 arg = CALL_EXPR_ARG (exp, 0);
4975 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4977 arg = CALL_EXPR_ARG (exp, 1);
4978 op1 = expand_normal (arg);
4980 return expand_copysign (op0, op1, target);
4983 /* Create a new constant string literal and return a char* pointer to it.
4984 The STRING_CST value is the LEN characters at STR. */
4985 tree
4986 build_string_literal (int len, const char *str)
4988 tree t, elem, index, type;
4990 t = build_string (len, str);
4991 elem = build_type_variant (char_type_node, 1, 0);
4992 index = build_index_type (size_int (len - 1));
4993 type = build_array_type (elem, index);
4994 TREE_TYPE (t) = type;
4995 TREE_CONSTANT (t) = 1;
4996 TREE_READONLY (t) = 1;
4997 TREE_STATIC (t) = 1;
4999 type = build_pointer_type (elem);
5000 t = build1 (ADDR_EXPR, type,
5001 build4 (ARRAY_REF, elem,
5002 t, integer_zero_node, NULL_TREE, NULL_TREE));
5003 return t;
/* Expand a call to either the entry or exit function profiler.
   EXITP selects the exit libfunc when true, the entry libfunc
   otherwise.  The profiling routine receives the address of the
   current function and its return address.  Always returns
   const0_rtx.  */

static rtx
expand_builtin_profile_func (bool exitp)
{
  rtx this_rtx, which;

  /* The address of the current function, taken from its DECL_RTL
     (which must be a MEM whose operand is the symbol).  */
  this_rtx = DECL_RTL (current_function_decl);
  gcc_assert (MEM_P (this_rtx));
  this_rtx = XEXP (this_rtx, 0);

  if (exitp)
    which = profile_function_exit_libfunc;
  else
    which = profile_function_entry_libfunc;

  emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
		     expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						 0, hard_frame_pointer_rtx),
		     Pmode);

  return const0_rtx;
}
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX to
   request a normal library call, or const0_rtx when the builtin has
   been fully handled (or is a no-op) here.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand both pointer arguments to Pmode and force them into a
	 form the insn's operand predicates accept.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5083 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5085 static rtx
5086 round_trampoline_addr (rtx tramp)
5088 rtx temp, addend, mask;
5090 /* If we don't need too much alignment, we'll have been guaranteed
5091 proper alignment by get_trampoline_type. */
5092 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5093 return tramp;
5095 /* Round address up to desired boundary. */
5096 temp = gen_reg_rtx (Pmode);
5097 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5098 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5100 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5101 temp, 0, OPTAB_LIB_WIDEN);
5102 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5103 temp, 0, OPTAB_LIB_WIDEN);
5105 return tramp;
/* Expand a call to __builtin_init_trampoline.  The three arguments
   are the trampoline storage, the nested function's address, and the
   static chain value.  Delegates the actual instruction emission to
   the target hook.  Returns const0_rtx, or NULL_RTX for a bad
   argument list.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding changed the address, rebuild the MEM with the known
     alignment and size of the trampoline.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5156 static rtx
5157 expand_builtin_adjust_trampoline (tree exp)
5159 rtx tramp;
5161 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5162 return NULL_RTX;
5164 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5165 tramp = round_trampoline_addr (tramp);
5166 if (targetm.calls.trampoline_adjust_address)
5167 tramp = targetm.calls.trampoline_adjust_address (tramp);
5169 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));	/* mode of the FP argument */
  rmode = TYPE_MODE (TREE_TYPE (exp));	/* mode of the integer result */
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      /* Try the insn; if it fails to match, undo any insns emitted
	 and fall through to the generic bit-extraction path.  */
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* Reinterpret the FP value as an integer of the same size.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word FP value: pick out the word containing the sign
	 bit and work in word_mode.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant (1 << bitpos) for the mask.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the decl of the actual builtin
   (fork or one of the exec family).  IGNORE is nonzero if the
   value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  /* Map each builtin to its libgcov wrapper.  */
  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an external declaration for the wrapper with the same type
     as the original builtin, then redirect the call to it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  The resulting MEM is marked volatile
   and given the memory-barrier alias set so it conflicts with all other
   memory accesses.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* __sync_fetch_and_nand changed meaning in GCC 4.4; warn once per
     spelling if -Wsync-nand is enabled.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Each warning is issued at most once per compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If OLD_VAL is promoted to a wider mode, convert it back to MODE.  Take
     care of CONST_INTs, where we know the old_mode only from the call
     argument.  */
  old_mode = GET_MODE (old_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  old_val = convert_modes (mode, old_mode, old_val, 1);

  new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* Likewise for NEW_VAL.  */
  old_mode = GET_MODE (new_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
  new_val = convert_modes (mode, old_mode, new_val, 1);

  if (is_bool)
    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
  else
    return expand_val_compare_and_swap (mem, old_val, new_val, target);
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
				  rtx target)
{
  rtx val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  return expand_sync_lock_test_and_set (mem, val, target);
}
/* Expand the __sync_synchronize intrinsic.  Prefer, in order: the
   target's memory_barrier insn, the synchronize libfunc, and finally
   a volatile empty asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.
   Uses the target's sync_lock_release insn when available; otherwise
   emits a full barrier followed by a store of zero.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = sync_lock_release[mode];
  if (icode != CODE_FOR_nothing)
    {
      if (!insn_data[icode].operand[1].predicate (val, mode))
	val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
5607 /* Expand an expression EXP that calls a built-in function,
5608 with result going to TARGET if that's convenient
5609 (and in mode MODE if that's convenient).
5610 SUBTARGET may be used as the target for computing one of EXP's operands.
5611 IGNORE is nonzero if the value is to be ignored. */
5614 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5615 int ignore)
5617 tree fndecl = get_callee_fndecl (exp);
5618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5619 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5621 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5622 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5624 /* When not optimizing, generate calls to library functions for a certain
5625 set of builtins. */
5626 if (!optimize
5627 && !called_as_built_in (fndecl)
5628 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5629 && fcode != BUILT_IN_ALLOCA
5630 && fcode != BUILT_IN_FREE)
5631 return expand_call (exp, target, ignore);
5633 /* The built-in function expanders test for target == const0_rtx
5634 to determine whether the function's result will be ignored. */
5635 if (ignore)
5636 target = const0_rtx;
5638 /* If the result of a pure or const built-in function is ignored, and
5639 none of its arguments are volatile, we can avoid expanding the
5640 built-in call and just evaluate the arguments for side-effects. */
5641 if (target == const0_rtx
5642 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5644 bool volatilep = false;
5645 tree arg;
5646 call_expr_arg_iterator iter;
5648 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5649 if (TREE_THIS_VOLATILE (arg))
5651 volatilep = true;
5652 break;
5655 if (! volatilep)
5657 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5658 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5659 return const0_rtx;
5663 switch (fcode)
5665 CASE_FLT_FN (BUILT_IN_FABS):
5666 target = expand_builtin_fabs (exp, target, subtarget);
5667 if (target)
5668 return target;
5669 break;
5671 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5672 target = expand_builtin_copysign (exp, target, subtarget);
5673 if (target)
5674 return target;
5675 break;
5677 /* Just do a normal library call if we were unable to fold
5678 the values. */
5679 CASE_FLT_FN (BUILT_IN_CABS):
5680 break;
5682 CASE_FLT_FN (BUILT_IN_EXP):
5683 CASE_FLT_FN (BUILT_IN_EXP10):
5684 CASE_FLT_FN (BUILT_IN_POW10):
5685 CASE_FLT_FN (BUILT_IN_EXP2):
5686 CASE_FLT_FN (BUILT_IN_EXPM1):
5687 CASE_FLT_FN (BUILT_IN_LOGB):
5688 CASE_FLT_FN (BUILT_IN_LOG):
5689 CASE_FLT_FN (BUILT_IN_LOG10):
5690 CASE_FLT_FN (BUILT_IN_LOG2):
5691 CASE_FLT_FN (BUILT_IN_LOG1P):
5692 CASE_FLT_FN (BUILT_IN_TAN):
5693 CASE_FLT_FN (BUILT_IN_ASIN):
5694 CASE_FLT_FN (BUILT_IN_ACOS):
5695 CASE_FLT_FN (BUILT_IN_ATAN):
5696 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5697 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5698 because of possible accuracy problems. */
5699 if (! flag_unsafe_math_optimizations)
5700 break;
5701 CASE_FLT_FN (BUILT_IN_SQRT):
5702 CASE_FLT_FN (BUILT_IN_FLOOR):
5703 CASE_FLT_FN (BUILT_IN_CEIL):
5704 CASE_FLT_FN (BUILT_IN_TRUNC):
5705 CASE_FLT_FN (BUILT_IN_ROUND):
5706 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5707 CASE_FLT_FN (BUILT_IN_RINT):
5708 target = expand_builtin_mathfn (exp, target, subtarget);
5709 if (target)
5710 return target;
5711 break;
5713 CASE_FLT_FN (BUILT_IN_ILOGB):
5714 if (! flag_unsafe_math_optimizations)
5715 break;
5716 CASE_FLT_FN (BUILT_IN_ISINF):
5717 CASE_FLT_FN (BUILT_IN_FINITE):
5718 case BUILT_IN_ISFINITE:
5719 case BUILT_IN_ISNORMAL:
5720 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5721 if (target)
5722 return target;
5723 break;
5725 CASE_FLT_FN (BUILT_IN_LCEIL):
5726 CASE_FLT_FN (BUILT_IN_LLCEIL):
5727 CASE_FLT_FN (BUILT_IN_LFLOOR):
5728 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5729 target = expand_builtin_int_roundingfn (exp, target);
5730 if (target)
5731 return target;
5732 break;
5734 CASE_FLT_FN (BUILT_IN_LRINT):
5735 CASE_FLT_FN (BUILT_IN_LLRINT):
5736 CASE_FLT_FN (BUILT_IN_LROUND):
5737 CASE_FLT_FN (BUILT_IN_LLROUND):
5738 target = expand_builtin_int_roundingfn_2 (exp, target);
5739 if (target)
5740 return target;
5741 break;
5743 CASE_FLT_FN (BUILT_IN_POW):
5744 target = expand_builtin_pow (exp, target, subtarget);
5745 if (target)
5746 return target;
5747 break;
5749 CASE_FLT_FN (BUILT_IN_POWI):
5750 target = expand_builtin_powi (exp, target, subtarget);
5751 if (target)
5752 return target;
5753 break;
5755 CASE_FLT_FN (BUILT_IN_ATAN2):
5756 CASE_FLT_FN (BUILT_IN_LDEXP):
5757 CASE_FLT_FN (BUILT_IN_SCALB):
5758 CASE_FLT_FN (BUILT_IN_SCALBN):
5759 CASE_FLT_FN (BUILT_IN_SCALBLN):
5760 if (! flag_unsafe_math_optimizations)
5761 break;
5763 CASE_FLT_FN (BUILT_IN_FMOD):
5764 CASE_FLT_FN (BUILT_IN_REMAINDER):
5765 CASE_FLT_FN (BUILT_IN_DREM):
5766 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5767 if (target)
5768 return target;
5769 break;
5771 CASE_FLT_FN (BUILT_IN_CEXPI):
5772 target = expand_builtin_cexpi (exp, target, subtarget);
5773 gcc_assert (target);
5774 return target;
5776 CASE_FLT_FN (BUILT_IN_SIN):
5777 CASE_FLT_FN (BUILT_IN_COS):
5778 if (! flag_unsafe_math_optimizations)
5779 break;
5780 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5781 if (target)
5782 return target;
5783 break;
5785 CASE_FLT_FN (BUILT_IN_SINCOS):
5786 if (! flag_unsafe_math_optimizations)
5787 break;
5788 target = expand_builtin_sincos (exp);
5789 if (target)
5790 return target;
5791 break;
5793 case BUILT_IN_APPLY_ARGS:
5794 return expand_builtin_apply_args ();
5796 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5797 FUNCTION with a copy of the parameters described by
5798 ARGUMENTS, and ARGSIZE. It returns a block of memory
5799 allocated on the stack into which is stored all the registers
5800 that might possibly be used for returning the result of a
5801 function. ARGUMENTS is the value returned by
5802 __builtin_apply_args. ARGSIZE is the number of bytes of
5803 arguments that must be copied. ??? How should this value be
5804 computed? We'll also need a safe worst case value for varargs
5805 functions. */
5806 case BUILT_IN_APPLY:
5807 if (!validate_arglist (exp, POINTER_TYPE,
5808 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5809 && !validate_arglist (exp, REFERENCE_TYPE,
5810 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5811 return const0_rtx;
5812 else
5814 rtx ops[3];
5816 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5817 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5818 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5820 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5823 /* __builtin_return (RESULT) causes the function to return the
5824 value described by RESULT. RESULT is address of the block of
5825 memory returned by __builtin_apply. */
5826 case BUILT_IN_RETURN:
5827 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5828 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5829 return const0_rtx;
5831 case BUILT_IN_SAVEREGS:
5832 return expand_builtin_saveregs ();
5834 case BUILT_IN_ARGS_INFO:
5835 return expand_builtin_args_info (exp);
5837 case BUILT_IN_VA_ARG_PACK:
5838 /* All valid uses of __builtin_va_arg_pack () are removed during
5839 inlining. */
5840 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5841 return const0_rtx;
5843 case BUILT_IN_VA_ARG_PACK_LEN:
5844 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5845 inlining. */
5846 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5847 return const0_rtx;
5849 /* Return the address of the first anonymous stack arg. */
5850 case BUILT_IN_NEXT_ARG:
5851 if (fold_builtin_next_arg (exp, false))
5852 return const0_rtx;
5853 return expand_builtin_next_arg ();
5855 case BUILT_IN_CLEAR_CACHE:
5856 target = expand_builtin___clear_cache (exp);
5857 if (target)
5858 return target;
5859 break;
5861 case BUILT_IN_CLASSIFY_TYPE:
5862 return expand_builtin_classify_type (exp);
5864 case BUILT_IN_CONSTANT_P:
5865 return const0_rtx;
5867 case BUILT_IN_FRAME_ADDRESS:
5868 case BUILT_IN_RETURN_ADDRESS:
5869 return expand_builtin_frame_address (fndecl, exp);
5871 /* Returns the address of the area where the structure is returned.
5872 0 otherwise. */
5873 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5874 if (call_expr_nargs (exp) != 0
5875 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5876 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5877 return const0_rtx;
5878 else
5879 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5881 case BUILT_IN_ALLOCA:
5882 target = expand_builtin_alloca (exp, target);
5883 if (target)
5884 return target;
5885 break;
5887 case BUILT_IN_STACK_SAVE:
5888 return expand_stack_save ();
5890 case BUILT_IN_STACK_RESTORE:
5891 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5892 return const0_rtx;
5894 case BUILT_IN_BSWAP32:
5895 case BUILT_IN_BSWAP64:
5896 target = expand_builtin_bswap (exp, target, subtarget);
5898 if (target)
5899 return target;
5900 break;
5902 CASE_INT_FN (BUILT_IN_FFS):
5903 case BUILT_IN_FFSIMAX:
5904 target = expand_builtin_unop (target_mode, exp, target,
5905 subtarget, ffs_optab);
5906 if (target)
5907 return target;
5908 break;
5910 CASE_INT_FN (BUILT_IN_CLZ):
5911 case BUILT_IN_CLZIMAX:
5912 target = expand_builtin_unop (target_mode, exp, target,
5913 subtarget, clz_optab);
5914 if (target)
5915 return target;
5916 break;
5918 CASE_INT_FN (BUILT_IN_CTZ):
5919 case BUILT_IN_CTZIMAX:
5920 target = expand_builtin_unop (target_mode, exp, target,
5921 subtarget, ctz_optab);
5922 if (target)
5923 return target;
5924 break;
5926 CASE_INT_FN (BUILT_IN_POPCOUNT):
5927 case BUILT_IN_POPCOUNTIMAX:
5928 target = expand_builtin_unop (target_mode, exp, target,
5929 subtarget, popcount_optab);
5930 if (target)
5931 return target;
5932 break;
5934 CASE_INT_FN (BUILT_IN_PARITY):
5935 case BUILT_IN_PARITYIMAX:
5936 target = expand_builtin_unop (target_mode, exp, target,
5937 subtarget, parity_optab);
5938 if (target)
5939 return target;
5940 break;
5942 case BUILT_IN_STRLEN:
5943 target = expand_builtin_strlen (exp, target, target_mode);
5944 if (target)
5945 return target;
5946 break;
5948 case BUILT_IN_STRCPY:
5949 target = expand_builtin_strcpy (exp, target);
5950 if (target)
5951 return target;
5952 break;
5954 case BUILT_IN_STRNCPY:
5955 target = expand_builtin_strncpy (exp, target);
5956 if (target)
5957 return target;
5958 break;
5960 case BUILT_IN_STPCPY:
5961 target = expand_builtin_stpcpy (exp, target, mode);
5962 if (target)
5963 return target;
5964 break;
5966 case BUILT_IN_MEMCPY:
5967 target = expand_builtin_memcpy (exp, target);
5968 if (target)
5969 return target;
5970 break;
5972 case BUILT_IN_MEMPCPY:
5973 target = expand_builtin_mempcpy (exp, target, mode);
5974 if (target)
5975 return target;
5976 break;
5978 case BUILT_IN_MEMSET:
5979 target = expand_builtin_memset (exp, target, mode);
5980 if (target)
5981 return target;
5982 break;
5984 case BUILT_IN_BZERO:
5985 target = expand_builtin_bzero (exp);
5986 if (target)
5987 return target;
5988 break;
5990 case BUILT_IN_STRCMP:
5991 target = expand_builtin_strcmp (exp, target);
5992 if (target)
5993 return target;
5994 break;
5996 case BUILT_IN_STRNCMP:
5997 target = expand_builtin_strncmp (exp, target, mode);
5998 if (target)
5999 return target;
6000 break;
6002 case BUILT_IN_BCMP:
6003 case BUILT_IN_MEMCMP:
6004 target = expand_builtin_memcmp (exp, target, mode);
6005 if (target)
6006 return target;
6007 break;
6009 case BUILT_IN_SETJMP:
6010 /* This should have been lowered to the builtins below. */
6011 gcc_unreachable ();
6013 case BUILT_IN_SETJMP_SETUP:
6014 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6015 and the receiver label. */
6016 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6018 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6019 VOIDmode, EXPAND_NORMAL);
6020 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6021 rtx label_r = label_rtx (label);
6023 /* This is copied from the handling of non-local gotos. */
6024 expand_builtin_setjmp_setup (buf_addr, label_r);
6025 nonlocal_goto_handler_labels
6026 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6027 nonlocal_goto_handler_labels);
6028 /* ??? Do not let expand_label treat us as such since we would
6029 not want to be both on the list of non-local labels and on
6030 the list of forced labels. */
6031 FORCED_LABEL (label) = 0;
6032 return const0_rtx;
6034 break;
6036 case BUILT_IN_SETJMP_DISPATCHER:
6037 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6038 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6040 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6041 rtx label_r = label_rtx (label);
6043 /* Remove the dispatcher label from the list of non-local labels
6044 since the receiver labels have been added to it above. */
6045 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6046 return const0_rtx;
6048 break;
6050 case BUILT_IN_SETJMP_RECEIVER:
6051 /* __builtin_setjmp_receiver is passed the receiver label. */
6052 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6054 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6055 rtx label_r = label_rtx (label);
6057 expand_builtin_setjmp_receiver (label_r);
6058 return const0_rtx;
6060 break;
6062 /* __builtin_longjmp is passed a pointer to an array of five words.
6063 It's similar to the C library longjmp function but works with
6064 __builtin_setjmp above. */
6065 case BUILT_IN_LONGJMP:
6066 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6068 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6069 VOIDmode, EXPAND_NORMAL);
6070 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6072 if (value != const1_rtx)
6074 error ("%<__builtin_longjmp%> second argument must be 1");
6075 return const0_rtx;
6078 expand_builtin_longjmp (buf_addr, value);
6079 return const0_rtx;
6081 break;
6083 case BUILT_IN_NONLOCAL_GOTO:
6084 target = expand_builtin_nonlocal_goto (exp);
6085 if (target)
6086 return target;
6087 break;
6089 /* This updates the setjmp buffer that is its argument with the value
6090 of the current stack pointer. */
6091 case BUILT_IN_UPDATE_SETJMP_BUF:
6092 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6094 rtx buf_addr
6095 = expand_normal (CALL_EXPR_ARG (exp, 0));
6097 expand_builtin_update_setjmp_buf (buf_addr);
6098 return const0_rtx;
6100 break;
6102 case BUILT_IN_TRAP:
6103 expand_builtin_trap ();
6104 return const0_rtx;
6106 case BUILT_IN_UNREACHABLE:
6107 expand_builtin_unreachable ();
6108 return const0_rtx;
6110 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6111 case BUILT_IN_SIGNBITD32:
6112 case BUILT_IN_SIGNBITD64:
6113 case BUILT_IN_SIGNBITD128:
6114 target = expand_builtin_signbit (exp, target);
6115 if (target)
6116 return target;
6117 break;
6119 /* Various hooks for the DWARF 2 __throw routine. */
6120 case BUILT_IN_UNWIND_INIT:
6121 expand_builtin_unwind_init ();
6122 return const0_rtx;
6123 case BUILT_IN_DWARF_CFA:
6124 return virtual_cfa_rtx;
6125 #ifdef DWARF2_UNWIND_INFO
6126 case BUILT_IN_DWARF_SP_COLUMN:
6127 return expand_builtin_dwarf_sp_column ();
6128 case BUILT_IN_INIT_DWARF_REG_SIZES:
6129 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6130 return const0_rtx;
6131 #endif
6132 case BUILT_IN_FROB_RETURN_ADDR:
6133 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6134 case BUILT_IN_EXTRACT_RETURN_ADDR:
6135 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6136 case BUILT_IN_EH_RETURN:
6137 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6138 CALL_EXPR_ARG (exp, 1));
6139 return const0_rtx;
6140 #ifdef EH_RETURN_DATA_REGNO
6141 case BUILT_IN_EH_RETURN_DATA_REGNO:
6142 return expand_builtin_eh_return_data_regno (exp);
6143 #endif
6144 case BUILT_IN_EXTEND_POINTER:
6145 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6146 case BUILT_IN_EH_POINTER:
6147 return expand_builtin_eh_pointer (exp);
6148 case BUILT_IN_EH_FILTER:
6149 return expand_builtin_eh_filter (exp);
6150 case BUILT_IN_EH_COPY_VALUES:
6151 return expand_builtin_eh_copy_values (exp);
6153 case BUILT_IN_VA_START:
6154 return expand_builtin_va_start (exp);
6155 case BUILT_IN_VA_END:
6156 return expand_builtin_va_end (exp);
6157 case BUILT_IN_VA_COPY:
6158 return expand_builtin_va_copy (exp);
6159 case BUILT_IN_EXPECT:
6160 return expand_builtin_expect (exp, target);
6161 case BUILT_IN_PREFETCH:
6162 expand_builtin_prefetch (exp);
6163 return const0_rtx;
6165 case BUILT_IN_PROFILE_FUNC_ENTER:
6166 return expand_builtin_profile_func (false);
6167 case BUILT_IN_PROFILE_FUNC_EXIT:
6168 return expand_builtin_profile_func (true);
6170 case BUILT_IN_INIT_TRAMPOLINE:
6171 return expand_builtin_init_trampoline (exp);
6172 case BUILT_IN_ADJUST_TRAMPOLINE:
6173 return expand_builtin_adjust_trampoline (exp);
6175 case BUILT_IN_FORK:
6176 case BUILT_IN_EXECL:
6177 case BUILT_IN_EXECV:
6178 case BUILT_IN_EXECLP:
6179 case BUILT_IN_EXECLE:
6180 case BUILT_IN_EXECVP:
6181 case BUILT_IN_EXECVE:
6182 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6183 if (target)
6184 return target;
6185 break;
6187 case BUILT_IN_FETCH_AND_ADD_1:
6188 case BUILT_IN_FETCH_AND_ADD_2:
6189 case BUILT_IN_FETCH_AND_ADD_4:
6190 case BUILT_IN_FETCH_AND_ADD_8:
6191 case BUILT_IN_FETCH_AND_ADD_16:
6192 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6193 target = expand_builtin_sync_operation (mode, exp, PLUS,
6194 false, target, ignore);
6195 if (target)
6196 return target;
6197 break;
6199 case BUILT_IN_FETCH_AND_SUB_1:
6200 case BUILT_IN_FETCH_AND_SUB_2:
6201 case BUILT_IN_FETCH_AND_SUB_4:
6202 case BUILT_IN_FETCH_AND_SUB_8:
6203 case BUILT_IN_FETCH_AND_SUB_16:
6204 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6205 target = expand_builtin_sync_operation (mode, exp, MINUS,
6206 false, target, ignore);
6207 if (target)
6208 return target;
6209 break;
6211 case BUILT_IN_FETCH_AND_OR_1:
6212 case BUILT_IN_FETCH_AND_OR_2:
6213 case BUILT_IN_FETCH_AND_OR_4:
6214 case BUILT_IN_FETCH_AND_OR_8:
6215 case BUILT_IN_FETCH_AND_OR_16:
6216 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6217 target = expand_builtin_sync_operation (mode, exp, IOR,
6218 false, target, ignore);
6219 if (target)
6220 return target;
6221 break;
6223 case BUILT_IN_FETCH_AND_AND_1:
6224 case BUILT_IN_FETCH_AND_AND_2:
6225 case BUILT_IN_FETCH_AND_AND_4:
6226 case BUILT_IN_FETCH_AND_AND_8:
6227 case BUILT_IN_FETCH_AND_AND_16:
6228 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6229 target = expand_builtin_sync_operation (mode, exp, AND,
6230 false, target, ignore);
6231 if (target)
6232 return target;
6233 break;
6235 case BUILT_IN_FETCH_AND_XOR_1:
6236 case BUILT_IN_FETCH_AND_XOR_2:
6237 case BUILT_IN_FETCH_AND_XOR_4:
6238 case BUILT_IN_FETCH_AND_XOR_8:
6239 case BUILT_IN_FETCH_AND_XOR_16:
6240 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6241 target = expand_builtin_sync_operation (mode, exp, XOR,
6242 false, target, ignore);
6243 if (target)
6244 return target;
6245 break;
6247 case BUILT_IN_FETCH_AND_NAND_1:
6248 case BUILT_IN_FETCH_AND_NAND_2:
6249 case BUILT_IN_FETCH_AND_NAND_4:
6250 case BUILT_IN_FETCH_AND_NAND_8:
6251 case BUILT_IN_FETCH_AND_NAND_16:
6252 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6253 target = expand_builtin_sync_operation (mode, exp, NOT,
6254 false, target, ignore);
6255 if (target)
6256 return target;
6257 break;
6259 case BUILT_IN_ADD_AND_FETCH_1:
6260 case BUILT_IN_ADD_AND_FETCH_2:
6261 case BUILT_IN_ADD_AND_FETCH_4:
6262 case BUILT_IN_ADD_AND_FETCH_8:
6263 case BUILT_IN_ADD_AND_FETCH_16:
6264 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6265 target = expand_builtin_sync_operation (mode, exp, PLUS,
6266 true, target, ignore);
6267 if (target)
6268 return target;
6269 break;
6271 case BUILT_IN_SUB_AND_FETCH_1:
6272 case BUILT_IN_SUB_AND_FETCH_2:
6273 case BUILT_IN_SUB_AND_FETCH_4:
6274 case BUILT_IN_SUB_AND_FETCH_8:
6275 case BUILT_IN_SUB_AND_FETCH_16:
6276 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6277 target = expand_builtin_sync_operation (mode, exp, MINUS,
6278 true, target, ignore);
6279 if (target)
6280 return target;
6281 break;
6283 case BUILT_IN_OR_AND_FETCH_1:
6284 case BUILT_IN_OR_AND_FETCH_2:
6285 case BUILT_IN_OR_AND_FETCH_4:
6286 case BUILT_IN_OR_AND_FETCH_8:
6287 case BUILT_IN_OR_AND_FETCH_16:
6288 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6289 target = expand_builtin_sync_operation (mode, exp, IOR,
6290 true, target, ignore);
6291 if (target)
6292 return target;
6293 break;
6295 case BUILT_IN_AND_AND_FETCH_1:
6296 case BUILT_IN_AND_AND_FETCH_2:
6297 case BUILT_IN_AND_AND_FETCH_4:
6298 case BUILT_IN_AND_AND_FETCH_8:
6299 case BUILT_IN_AND_AND_FETCH_16:
6300 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6301 target = expand_builtin_sync_operation (mode, exp, AND,
6302 true, target, ignore);
6303 if (target)
6304 return target;
6305 break;
6307 case BUILT_IN_XOR_AND_FETCH_1:
6308 case BUILT_IN_XOR_AND_FETCH_2:
6309 case BUILT_IN_XOR_AND_FETCH_4:
6310 case BUILT_IN_XOR_AND_FETCH_8:
6311 case BUILT_IN_XOR_AND_FETCH_16:
6312 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6313 target = expand_builtin_sync_operation (mode, exp, XOR,
6314 true, target, ignore);
6315 if (target)
6316 return target;
6317 break;
6319 case BUILT_IN_NAND_AND_FETCH_1:
6320 case BUILT_IN_NAND_AND_FETCH_2:
6321 case BUILT_IN_NAND_AND_FETCH_4:
6322 case BUILT_IN_NAND_AND_FETCH_8:
6323 case BUILT_IN_NAND_AND_FETCH_16:
6324 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6325 target = expand_builtin_sync_operation (mode, exp, NOT,
6326 true, target, ignore);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6332 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6333 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6334 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6335 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6336 if (mode == VOIDmode)
6337 mode = TYPE_MODE (boolean_type_node);
6338 if (!target || !register_operand (target, mode))
6339 target = gen_reg_rtx (mode);
6341 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6342 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6343 if (target)
6344 return target;
6345 break;
6347 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6348 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6349 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6350 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6351 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6352 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6353 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6354 if (target)
6355 return target;
6356 break;
6358 case BUILT_IN_LOCK_TEST_AND_SET_1:
6359 case BUILT_IN_LOCK_TEST_AND_SET_2:
6360 case BUILT_IN_LOCK_TEST_AND_SET_4:
6361 case BUILT_IN_LOCK_TEST_AND_SET_8:
6362 case BUILT_IN_LOCK_TEST_AND_SET_16:
6363 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6364 target = expand_builtin_lock_test_and_set (mode, exp, target);
6365 if (target)
6366 return target;
6367 break;
6369 case BUILT_IN_LOCK_RELEASE_1:
6370 case BUILT_IN_LOCK_RELEASE_2:
6371 case BUILT_IN_LOCK_RELEASE_4:
6372 case BUILT_IN_LOCK_RELEASE_8:
6373 case BUILT_IN_LOCK_RELEASE_16:
6374 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6375 expand_builtin_lock_release (mode, exp);
6376 return const0_rtx;
6378 case BUILT_IN_SYNCHRONIZE:
6379 expand_builtin_synchronize ();
6380 return const0_rtx;
6382 case BUILT_IN_OBJECT_SIZE:
6383 return expand_builtin_object_size (exp);
6385 case BUILT_IN_MEMCPY_CHK:
6386 case BUILT_IN_MEMPCPY_CHK:
6387 case BUILT_IN_MEMMOVE_CHK:
6388 case BUILT_IN_MEMSET_CHK:
6389 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_STRCPY_CHK:
6395 case BUILT_IN_STPCPY_CHK:
6396 case BUILT_IN_STRNCPY_CHK:
6397 case BUILT_IN_STRCAT_CHK:
6398 case BUILT_IN_STRNCAT_CHK:
6399 case BUILT_IN_SNPRINTF_CHK:
6400 case BUILT_IN_VSNPRINTF_CHK:
6401 maybe_emit_chk_warning (exp, fcode);
6402 break;
6404 case BUILT_IN_SPRINTF_CHK:
6405 case BUILT_IN_VSPRINTF_CHK:
6406 maybe_emit_sprintf_chk_warning (exp, fcode);
6407 break;
6409 case BUILT_IN_FREE:
6410 maybe_emit_free_warning (exp);
6411 break;
6413 default: /* just do library call, if unknown builtin */
6414 break;
6417 /* The switch statement above can drop through to cause the function
6418 to be called normally. */
6419 return expand_call (exp, target, ignore);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls (address of a known FUNCTION_DECL) qualify.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* Machine-dependent builtins are excluded; only "normal" builtins
     have math function codes we can report.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual call arguments in
     lockstep, requiring each argument's type class to match the
     corresponding parameter's type class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Extra actual arguments beyond the declared list: mismatch.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Fewer actual arguments than declared parameters: mismatch.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Match by type class only (float vs complex vs pointer vs
	 integral), not exact type identity.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
6494 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6495 evaluate to a constant. */
6497 static tree
6498 fold_builtin_constant_p (tree arg)
6500 /* We return 1 for a numeric type that's known to be a constant
6501 value at compile-time or for an aggregate type that's a
6502 literal constant. */
6503 STRIP_NOPS (arg);
6505 /* If we know this is a constant, emit the constant of one. */
6506 if (CONSTANT_CLASS_P (arg)
6507 || (TREE_CODE (arg) == CONSTRUCTOR
6508 && TREE_CONSTANT (arg)))
6509 return integer_one_node;
6510 if (TREE_CODE (arg) == ADDR_EXPR)
6512 tree op = TREE_OPERAND (arg, 0);
6513 if (TREE_CODE (op) == STRING_CST
6514 || (TREE_CODE (op) == ARRAY_REF
6515 && integer_zerop (TREE_OPERAND (op, 1))
6516 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6517 return integer_one_node;
6520 /* If this expression has side effects, show we don't know it to be a
6521 constant. Likewise if it's a pointer or aggregate type since in
6522 those case we only want literals, since those are only optimized
6523 when generating RTL, not later.
6524 And finally, if we are compiling an initializer, not code, we
6525 need to return a definite result now; there's not going to be any
6526 more optimization done. */
6527 if (TREE_SIDE_EFFECTS (arg)
6528 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6529 || POINTER_TYPE_P (TREE_TYPE (arg))
6530 || cfun == 0
6531 || folding_initializer)
6532 return integer_zero_node;
6534 return NULL_TREE;
6537 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6538 return it as a truthvalue. */
6540 static tree
6541 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6543 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6545 fn = built_in_decls[BUILT_IN_EXPECT];
6546 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6547 ret_type = TREE_TYPE (TREE_TYPE (fn));
6548 pred_type = TREE_VALUE (arg_types);
6549 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6551 pred = fold_convert_loc (loc, pred_type, pred);
6552 expected = fold_convert_loc (loc, expected_type, expected);
6553 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6555 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6556 build_int_cst (ret_type, 0));
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  LOC is the location to
   use for any expressions built.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  /* __builtin_expect (a && b, e) becomes
     __builtin_expect (a, e) && __builtin_expect (b, e), and likewise
     for ||; each operand then carries the branch prediction hint.  */
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component and array references to reach the underlying
	 declaration; a weak symbol's address is not a compile-time
	 constant, so give up on those.  */
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6630 /* Fold a call to __builtin_classify_type with argument ARG. */
6632 static tree
6633 fold_builtin_classify_type (tree arg)
6635 if (arg == 0)
6636 return build_int_cst (NULL_TREE, no_type_class);
6638 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6641 /* Fold a call to __builtin_strlen with argument ARG. */
6643 static tree
6644 fold_builtin_strlen (location_t loc, tree type, tree arg)
6646 if (!validate_arg (arg, POINTER_TYPE))
6647 return NULL_TREE;
6648 else
6650 tree len = c_strlen (arg, 0);
6652 if (len)
6653 return fold_convert_loc (loc, type, len);
6655 return NULL_TREE;
6659 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6661 static tree
6662 fold_builtin_inf (location_t loc, tree type, int warn)
6664 REAL_VALUE_TYPE real;
6666 /* __builtin_inff is intended to be usable to define INFINITY on all
6667 targets. If an infinity is not available, INFINITY expands "to a
6668 positive constant of type float that overflows at translation
6669 time", footnote "In this case, using INFINITY will violate the
6670 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6671 Thus we pedwarn to ensure this constraint violation is
6672 diagnosed. */
6673 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6674 pedwarn (loc, 0, "target format does not support infinity");
6676 real_inf (&real);
6677 return build_real (type, real);
6680 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6682 static tree
6683 fold_builtin_nan (tree arg, tree type, int quiet)
6685 REAL_VALUE_TYPE real;
6686 const char *str;
6688 if (!validate_arg (arg, POINTER_TYPE))
6689 return NULL_TREE;
6690 str = c_getstr (arg);
6691 if (!str)
6692 return NULL_TREE;
6694 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6695 return NULL_TREE;
6697 return build_real (type, real);
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer type is integer valued by
       construction.  */
    case FLOAT_EXPR:
      return true;

    /* These preserve integrality of their (relevant) operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these is the value of operand 1.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Arithmetic on two integer values yields an integer value
       (+Inf/NaN cases are admitted by the function's contract).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Both arms of the conditional must be integer valued.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    /* A constant is integer valued iff its value is an integer.  */
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* Look at what is being converted: an integer source is
	   trivially integer valued; a real source must recurse.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding-to-integer builtins always return integer values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax of two integer values is an integer value.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservative default: not known to be integer valued.  */
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG at location LOC.
   Returns the simplified tree, or NULL_TREE if nothing can be done.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* If ARG was widened from a narrower float type and a builtin
	 exists for that narrower type, do the operation in the narrow
	 type and widen the result instead.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG at location LOC.
   Returns the simplified tree, or NULL_TREE if nothing can be done.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* If ARG was widened from a narrower float type and a builtin
	 exists for that narrower type, call the narrow variant.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The `long' variant returns a narrower integer; convert the
	     result back to the original return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the called function's declaration, used when
   rebuilding the call; LOC positions built expressions.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Expand cabs(z) as sqrt(re*re + im*im); save ARG and its
	     parts so each is evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the call's return type; LOC positions built expressions.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{
  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halving the exponent divides the
	     inner root's fraction by two.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* Take |x| unless x is known nonnegative, since pow requires a
	 nonnegative base for non-integer exponents.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7033 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7034 Return NULL_TREE if no simplification can be made. */
7036 static tree
7037 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7039 const enum built_in_function fcode = builtin_mathfn_code (arg);
7040 tree res;
7042 if (!validate_arg (arg, REAL_TYPE))
7043 return NULL_TREE;
7045 /* Calculate the result when the argument is a constant. */
7046 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7047 return res;
7049 if (flag_unsafe_math_optimizations)
7051 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7052 if (BUILTIN_EXPONENT_P (fcode))
7054 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7055 const REAL_VALUE_TYPE third_trunc =
7056 real_value_truncate (TYPE_MODE (type), dconst_third ());
7057 arg = fold_build2_loc (loc, MULT_EXPR, type,
7058 CALL_EXPR_ARG (arg, 0),
7059 build_real (type, third_trunc));
7060 return build_call_expr_loc (loc, expfn, 1, arg);
7063 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7064 if (BUILTIN_SQRT_P (fcode))
7066 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7068 if (powfn)
7070 tree arg0 = CALL_EXPR_ARG (arg, 0);
7071 tree tree_root;
7072 REAL_VALUE_TYPE dconstroot = dconst_third ();
7074 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7075 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7076 tree_root = build_real (type, dconstroot);
7077 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7081 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7082 if (BUILTIN_CBRT_P (fcode))
7084 tree arg0 = CALL_EXPR_ARG (arg, 0);
7085 if (tree_expr_nonnegative_p (arg0))
7087 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7089 if (powfn)
7091 tree tree_root;
7092 REAL_VALUE_TYPE dconstroot;
7094 real_arithmetic (&dconstroot, MULT_EXPR,
7095 dconst_third_ptr (), dconst_third_ptr ());
7096 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7097 tree_root = build_real (type, dconstroot);
7098 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7103 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7104 if (fcode == BUILT_IN_POW
7105 || fcode == BUILT_IN_POWF
7106 || fcode == BUILT_IN_POWL)
7108 tree arg00 = CALL_EXPR_ARG (arg, 0);
7109 tree arg01 = CALL_EXPR_ARG (arg, 1);
7110 if (tree_expr_nonnegative_p (arg00))
7112 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7113 const REAL_VALUE_TYPE dconstroot
7114 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7115 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7116 build_real (type, dconstroot));
7117 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7121 return NULL_TREE;
7124 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7125 TYPE is the type of the return value. Return NULL_TREE if no
7126 simplification can be made. */
7128 static tree
7129 fold_builtin_cos (location_t loc,
7130 tree arg, tree type, tree fndecl)
7132 tree res, narg;
7134 if (!validate_arg (arg, REAL_TYPE))
7135 return NULL_TREE;
7137 /* Calculate the result when the argument is a constant. */
7138 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7139 return res;
7141 /* Optimize cos(-x) into cos (x). */
7142 if ((narg = fold_strip_sign_ops (arg)))
7143 return build_call_expr_loc (loc, fndecl, 1, narg);
7145 return NULL_TREE;
7148 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7149 Return NULL_TREE if no simplification can be made. */
7151 static tree
7152 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7154 if (validate_arg (arg, REAL_TYPE))
7156 tree res, narg;
7158 /* Calculate the result when the argument is a constant. */
7159 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7160 return res;
7162 /* Optimize cosh(-x) into cosh (x). */
7163 if ((narg = fold_strip_sign_ops (arg)))
7164 return build_call_expr_loc (loc, fndecl, 1, narg);
7167 return NULL_TREE;
7170 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7171 argument ARG. TYPE is the type of the return value. Return
7172 NULL_TREE if no simplification can be made. */
7174 static tree
7175 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7176 bool hyper)
7178 if (validate_arg (arg, COMPLEX_TYPE)
7179 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7181 tree tmp;
7183 /* Calculate the result when the argument is a constant. */
7184 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7185 return tmp;
7187 /* Optimize fn(-x) into fn(x). */
7188 if ((tmp = fold_strip_sign_ops (arg)))
7189 return build_call_expr_loc (loc, fndecl, 1, tmp);
7192 return NULL_TREE;
7195 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7196 Return NULL_TREE if no simplification can be made. */
7198 static tree
7199 fold_builtin_tan (tree arg, tree type)
7201 enum built_in_function fcode;
7202 tree res;
7204 if (!validate_arg (arg, REAL_TYPE))
7205 return NULL_TREE;
7207 /* Calculate the result when the argument is a constant. */
7208 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7209 return res;
7211 /* Optimize tan(atan(x)) = x. */
7212 fcode = builtin_mathfn_code (arg);
7213 if (flag_unsafe_math_optimizations
7214 && (fcode == BUILT_IN_ATAN
7215 || fcode == BUILT_IN_ATANF
7216 || fcode == BUILT_IN_ATANL))
7217 return CALL_EXPR_ARG (arg, 0);
7219 return NULL_TREE;
7222 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7223 NULL_TREE if no simplification can be made. */
7225 static tree
7226 fold_builtin_sincos (location_t loc,
7227 tree arg0, tree arg1, tree arg2)
7229 tree type;
7230 tree res, fn, call;
7232 if (!validate_arg (arg0, REAL_TYPE)
7233 || !validate_arg (arg1, POINTER_TYPE)
7234 || !validate_arg (arg2, POINTER_TYPE))
7235 return NULL_TREE;
7237 type = TREE_TYPE (arg0);
7239 /* Calculate the result when the argument is a constant. */
7240 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7241 return res;
7243 /* Canonicalize sincos to cexpi. */
7244 if (!TARGET_C99_FUNCTIONS)
7245 return NULL_TREE;
7246 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7247 if (!fn)
7248 return NULL_TREE;
7250 call = build_call_expr_loc (loc, fn, 1, arg0);
7251 call = builtin_save_expr (call);
7253 return build2 (COMPOUND_EXPR, void_type_node,
7254 build2 (MODIFY_EXPR, void_type_node,
7255 build_fold_indirect_ref_loc (loc, arg1),
7256 build1 (IMAGPART_EXPR, type, call)),
7257 build2 (MODIFY_EXPR, void_type_node,
7258 build_fold_indirect_ref_loc (loc, arg2),
7259 build1 (REALPART_EXPR, type, call)));
7262 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7263 NULL_TREE if no simplification can be made. */
7265 static tree
7266 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7268 tree rtype;
7269 tree realp, imagp, ifn;
7270 tree res;
/* Require a complex argument with a real component type.  */
7272 if (!validate_arg (arg0, COMPLEX_TYPE)
7273 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7274 return NULL_TREE;
7276 /* Calculate the result when the argument is a constant. */
7277 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7278 return res;
/* RTYPE is the real component type of the complex argument.  */
7280 rtype = TREE_TYPE (TREE_TYPE (arg0));
7282 /* In case we can figure out the real part of arg0 and it is constant zero
7283 fold to cexpi. */
/* Both transforms below need cexpi; give up if the target lacks C99
   functions or the builtin is unavailable.  */
7284 if (!TARGET_C99_FUNCTIONS)
7285 return NULL_TREE;
7286 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7287 if (!ifn)
7288 return NULL_TREE;
7290 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7291 && real_zerop (realp))
7293 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7294 return build_call_expr_loc (loc, ifn, 1, narg);
7297 /* In case we can easily decompose real and imaginary parts split cexp
7298 to exp (r) * cexpi (i). */
/* REALP was computed above and may be NULL_TREE if the real part
   could not be extracted; the && guards against that.  */
7299 if (flag_unsafe_math_optimizations
7300 && realp)
7302 tree rfn, rcall, icall;
7304 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7305 if (!rfn)
7306 return NULL_TREE;
7308 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7309 if (!imagp)
7310 return NULL_TREE;
/* Save both calls so each is evaluated only once in the result.  */
7312 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7313 icall = builtin_save_expr (icall);
7314 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7315 rcall = builtin_save_expr (rcall);
/* Result is exp(r)*real(cexpi(i)) + I*exp(r)*imag(cexpi(i)).  */
7316 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7317 fold_build2_loc (loc, MULT_EXPR, rtype,
7318 rcall,
7319 fold_build1_loc (loc, REALPART_EXPR,
7320 rtype, icall)),
7321 fold_build2_loc (loc, MULT_EXPR, rtype,
7322 rcall,
7323 fold_build1_loc (loc, IMAGPART_EXPR,
7324 rtype, icall)));
7327 return NULL_TREE;
7330 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7331 Return NULL_TREE if no simplification can be made. */
7333 static tree
7334 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7336 if (!validate_arg (arg, REAL_TYPE))
7337 return NULL_TREE;
7339 /* Optimize trunc of constant value. */
7340 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7342 REAL_VALUE_TYPE r, x;
7343 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7345 x = TREE_REAL_CST (arg);
7346 real_trunc (&r, TYPE_MODE (type), &x);
7347 return build_real (type, r);
7350 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7353 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7354 Return NULL_TREE if no simplification can be made. */
7356 static tree
7357 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7359 if (!validate_arg (arg, REAL_TYPE))
7360 return NULL_TREE;
7362 /* Optimize floor of constant value. */
7363 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7365 REAL_VALUE_TYPE x;
7367 x = TREE_REAL_CST (arg);
7368 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7370 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7371 REAL_VALUE_TYPE r;
7373 real_floor (&r, TYPE_MODE (type), &x);
7374 return build_real (type, r);
7378 /* Fold floor (x) where x is nonnegative to trunc (x). */
7379 if (tree_expr_nonnegative_p (arg))
7381 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7382 if (truncfn)
7383 return build_call_expr_loc (loc, truncfn, 1, arg);
7386 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7389 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7390 Return NULL_TREE if no simplification can be made. */
7392 static tree
7393 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7395 if (!validate_arg (arg, REAL_TYPE))
7396 return NULL_TREE;
7398 /* Optimize ceil of constant value. */
7399 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7401 REAL_VALUE_TYPE x;
7403 x = TREE_REAL_CST (arg);
7404 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7406 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7407 REAL_VALUE_TYPE r;
7409 real_ceil (&r, TYPE_MODE (type), &x);
7410 return build_real (type, r);
7414 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7417 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7418 Return NULL_TREE if no simplification can be made. */
7420 static tree
7421 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7423 if (!validate_arg (arg, REAL_TYPE))
7424 return NULL_TREE;
7426 /* Optimize round of constant value. */
7427 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7429 REAL_VALUE_TYPE x;
7431 x = TREE_REAL_CST (arg);
7432 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7434 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7435 REAL_VALUE_TYPE r;
7437 real_round (&r, TYPE_MODE (type), &x);
7438 return build_real (type, r);
7442 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7445 /* Fold function call to builtin lround, lroundf or lroundl (or the
7446 corresponding long long versions) and other rounding functions. ARG
7447 is the argument to the call. Return NULL_TREE if no simplification
7448 can be made. */
7450 static tree
7451 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7453 if (!validate_arg (arg, REAL_TYPE))
7454 return NULL_TREE;
7456 /* Optimize lround of constant value. */
7457 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7459 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
/* Only finite values can be rounded to an integer at compile time.  */
7461 if (real_isfinite (&x))
7463 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7464 tree ftype = TREE_TYPE (arg);
7465 unsigned HOST_WIDE_INT lo2;
7466 HOST_WIDE_INT hi, lo;
7467 REAL_VALUE_TYPE r;
/* Apply the rounding mode implied by the builtin being folded.  */
7469 switch (DECL_FUNCTION_CODE (fndecl))
7471 CASE_FLT_FN (BUILT_IN_LFLOOR):
7472 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7473 real_floor (&r, TYPE_MODE (ftype), &x);
7474 break;
7476 CASE_FLT_FN (BUILT_IN_LCEIL):
7477 CASE_FLT_FN (BUILT_IN_LLCEIL):
7478 real_ceil (&r, TYPE_MODE (ftype), &x);
7479 break;
7481 CASE_FLT_FN (BUILT_IN_LROUND):
7482 CASE_FLT_FN (BUILT_IN_LLROUND):
7483 real_round (&r, TYPE_MODE (ftype), &x);
7484 break;
7486 default:
7487 gcc_unreachable ();
/* Build the integer constant only when the rounded value fits the
   integer return type; otherwise fall through.  */
7490 REAL_VALUE_TO_INT (&lo, &hi, r);
7491 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7492 return build_int_cst_wide (itype, lo2, hi);
7496 switch (DECL_FUNCTION_CODE (fndecl))
7498 CASE_FLT_FN (BUILT_IN_LFLOOR):
7499 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7500 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7501 if (tree_expr_nonnegative_p (arg))
7502 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7503 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7504 break;
7505 default:;
7508 return fold_fixed_mathfn (loc, fndecl, arg);
7511 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7512 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7513 the argument to the call. Return NULL_TREE if no simplification can
7514 be made. */
7516 static tree
7517 fold_builtin_bitop (tree fndecl, tree arg)
7519 if (!validate_arg (arg, INTEGER_TYPE))
7520 return NULL_TREE;
7522 /* Optimize for constant argument. */
7523 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
/* The constant is held as a low/high pair of HOST_WIDE_INTs.  */
7525 HOST_WIDE_INT hi, width, result;
7526 unsigned HOST_WIDE_INT lo;
7527 tree type;
7529 type = TREE_TYPE (arg);
7530 width = TYPE_PRECISION (type);
7531 lo = TREE_INT_CST_LOW (arg);
7533 /* Clear all the bits that are beyond the type's precision. */
7534 if (width > HOST_BITS_PER_WIDE_INT)
7536 hi = TREE_INT_CST_HIGH (arg);
7537 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7538 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7540 else
/* Narrow type: the high word is unused; mask the low word.  */
7542 hi = 0;
7543 if (width < HOST_BITS_PER_WIDE_INT)
7544 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7547 switch (DECL_FUNCTION_CODE (fndecl))
/* ffs: index (1-based) of the least significant set bit, 0 if none.  */
7549 CASE_INT_FN (BUILT_IN_FFS):
7550 if (lo != 0)
7551 result = exact_log2 (lo & -lo) + 1;
7552 else if (hi != 0)
7553 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7554 else
7555 result = 0;
7556 break;
/* clz: count of leading zero bits; for a zero argument use the
   target's CLZ_DEFINED_VALUE_AT_ZERO, else WIDTH.  */
7558 CASE_INT_FN (BUILT_IN_CLZ):
7559 if (hi != 0)
7560 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7561 else if (lo != 0)
7562 result = width - floor_log2 (lo) - 1;
7563 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7564 result = width;
7565 break;
/* ctz: count of trailing zero bits, analogous zero handling.  */
7567 CASE_INT_FN (BUILT_IN_CTZ):
7568 if (lo != 0)
7569 result = exact_log2 (lo & -lo);
7570 else if (hi != 0)
7571 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7572 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7573 result = width;
7574 break;
/* popcount: total number of set bits, counted with the classic
   x &= x - 1 clear-lowest-set-bit loop.  */
7576 CASE_INT_FN (BUILT_IN_POPCOUNT):
7577 result = 0;
7578 while (lo)
7579 result++, lo &= lo - 1;
7580 while (hi)
7581 result++, hi &= hi - 1;
7582 break;
/* parity: popcount reduced modulo 2.  */
7584 CASE_INT_FN (BUILT_IN_PARITY):
7585 result = 0;
7586 while (lo)
7587 result++, lo &= lo - 1;
7588 while (hi)
7589 result++, hi &= hi - 1;
7590 result &= 1;
7591 break;
7593 default:
7594 gcc_unreachable ();
7597 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7600 return NULL_TREE;
7603 /* Fold function call to builtin_bswap and the long and long long
7604 variants. Return NULL_TREE if no simplification can be made. */
7605 static tree
7606 fold_builtin_bswap (tree fndecl, tree arg)
7608 if (! validate_arg (arg, INTEGER_TYPE))
7609 return NULL_TREE;
7611 /* Optimize constant value. */
7612 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
/* LO/HI hold the argument's low and high words; R_LO/R_HI accumulate
   the byte-reversed result.  */
7614 HOST_WIDE_INT hi, width, r_hi = 0;
7615 unsigned HOST_WIDE_INT lo, r_lo = 0;
7616 tree type;
7618 type = TREE_TYPE (arg);
7619 width = TYPE_PRECISION (type);
7620 lo = TREE_INT_CST_LOW (arg);
7621 hi = TREE_INT_CST_HIGH (arg);
7623 switch (DECL_FUNCTION_CODE (fndecl))
7625 case BUILT_IN_BSWAP32:
7626 case BUILT_IN_BSWAP64:
7628 int s;
/* Move the byte at bit offset S to the mirrored offset D,
   picking source and destination words as needed.  */
7630 for (s = 0; s < width; s += 8)
7632 int d = width - s - 8;
7633 unsigned HOST_WIDE_INT byte;
7635 if (s < HOST_BITS_PER_WIDE_INT)
7636 byte = (lo >> s) & 0xff;
7637 else
7638 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7640 if (d < HOST_BITS_PER_WIDE_INT)
7641 r_lo |= byte << d;
7642 else
7643 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7647 break;
7649 default:
7650 gcc_unreachable ();
/* A result narrower than a host wide int fits in the low word.  */
7653 if (width < HOST_BITS_PER_WIDE_INT)
7654 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7655 else
7656 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7659 return NULL_TREE;
7662 /* A subroutine of fold_builtin to fold the various logarithmic
7663 functions. Return NULL_TREE if no simplification can me made.
7664 FUNC is the corresponding MPFR logarithm function. */
7666 static tree
7667 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7668 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7670 if (validate_arg (arg, REAL_TYPE))
7672 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7673 tree res;
7674 const enum built_in_function fcode = builtin_mathfn_code (arg);
7676 /* Calculate the result when the argument is a constant. */
7677 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7678 return res;
7680 /* Special case, optimize logN(expN(x)) = x. */
/* Which logarithm this is is identified by the MPFR evaluator
   FUNC, so match it against the corresponding exp builtins.  */
7681 if (flag_unsafe_math_optimizations
7682 && ((func == mpfr_log
7683 && (fcode == BUILT_IN_EXP
7684 || fcode == BUILT_IN_EXPF
7685 || fcode == BUILT_IN_EXPL))
7686 || (func == mpfr_log2
7687 && (fcode == BUILT_IN_EXP2
7688 || fcode == BUILT_IN_EXP2F
7689 || fcode == BUILT_IN_EXP2L))
7690 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7691 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7693 /* Optimize logN(func()) for various exponential functions. We
7694 want to determine the value "x" and the power "exponent" in
7695 order to transform logN(x**exponent) into exponent*logN(x). */
7696 if (flag_unsafe_math_optimizations)
7698 tree exponent = 0, x = 0;
7700 switch (fcode)
7702 CASE_FLT_FN (BUILT_IN_EXP):
7703 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7704 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7705 dconst_e ()));
7706 exponent = CALL_EXPR_ARG (arg, 0);
7707 break;
7708 CASE_FLT_FN (BUILT_IN_EXP2):
7709 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7710 x = build_real (type, dconst2);
7711 exponent = CALL_EXPR_ARG (arg, 0);
7712 break;
7713 CASE_FLT_FN (BUILT_IN_EXP10):
7714 CASE_FLT_FN (BUILT_IN_POW10):
7715 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7717 REAL_VALUE_TYPE dconst10;
7718 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7719 x = build_real (type, dconst10);
7721 exponent = CALL_EXPR_ARG (arg, 0);
7722 break;
7723 CASE_FLT_FN (BUILT_IN_SQRT):
7724 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7725 x = CALL_EXPR_ARG (arg, 0);
7726 exponent = build_real (type, dconsthalf);
7727 break;
7728 CASE_FLT_FN (BUILT_IN_CBRT):
7729 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7730 x = CALL_EXPR_ARG (arg, 0);
7731 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7732 dconst_third ()));
7733 break;
7734 CASE_FLT_FN (BUILT_IN_POW):
7735 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7736 x = CALL_EXPR_ARG (arg, 0);
7737 exponent = CALL_EXPR_ARG (arg, 1);
7738 break;
7739 default:
7740 break;
7743 /* Now perform the optimization. */
/* Only rewrite when the switch recognized the argument and filled
   in both the base X and the EXPONENT.  */
7744 if (x && exponent)
7746 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7747 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7752 return NULL_TREE;
7755 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7756 NULL_TREE if no simplification can be made. */
7758 static tree
7759 fold_builtin_hypot (location_t loc, tree fndecl,
7760 tree arg0, tree arg1, tree type)
7762 tree res, narg0, narg1;
7764 if (!validate_arg (arg0, REAL_TYPE)
7765 || !validate_arg (arg1, REAL_TYPE))
7766 return NULL_TREE;
7768 /* Calculate the result when the argument is a constant. */
7769 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7770 return res;
7772 /* If either argument to hypot has a negate or abs, strip that off.
7773 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7774 narg0 = fold_strip_sign_ops (arg0);
7775 narg1 = fold_strip_sign_ops (arg1);
7776 if (narg0 || narg1)
7778 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7779 narg1 ? narg1 : arg1);
7782 /* If either argument is zero, hypot is fabs of the other. */
7783 if (real_zerop (arg0))
7784 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7785 else if (real_zerop (arg1))
7786 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7788 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7789 if (flag_unsafe_math_optimizations
7790 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7792 const REAL_VALUE_TYPE sqrt2_trunc
7793 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7794 return fold_build2_loc (loc, MULT_EXPR, type,
7795 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7796 build_real (type, sqrt2_trunc));
7799 return NULL_TREE;
7803 /* Fold a builtin function call to pow, powf, or powl. Return
7804 NULL_TREE if no simplification can be made. */
7805 static tree
7806 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7808 tree res;
7810 if (!validate_arg (arg0, REAL_TYPE)
7811 || !validate_arg (arg1, REAL_TYPE))
7812 return NULL_TREE;
7814 /* Calculate the result when the argument is a constant. */
7815 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7816 return res;
7818 /* Optimize pow(1.0,y) = 1.0. */
7819 if (real_onep (arg0))
7820 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
/* Transforms keyed on a constant exponent.  */
7822 if (TREE_CODE (arg1) == REAL_CST
7823 && !TREE_OVERFLOW (arg1))
7825 REAL_VALUE_TYPE cint;
7826 REAL_VALUE_TYPE c;
7827 HOST_WIDE_INT n;
7829 c = TREE_REAL_CST (arg1);
7831 /* Optimize pow(x,0.0) = 1.0. */
7832 if (REAL_VALUES_EQUAL (c, dconst0))
7833 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7834 arg0);
7836 /* Optimize pow(x,1.0) = x. */
7837 if (REAL_VALUES_EQUAL (c, dconst1))
7838 return arg0;
7840 /* Optimize pow(x,-1.0) = 1.0/x. */
7841 if (REAL_VALUES_EQUAL (c, dconstm1))
7842 return fold_build2_loc (loc, RDIV_EXPR, type,
7843 build_real (type, dconst1), arg0);
7845 /* Optimize pow(x,0.5) = sqrt(x). */
7846 if (flag_unsafe_math_optimizations
7847 && REAL_VALUES_EQUAL (c, dconsthalf))
7849 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7851 if (sqrtfn != NULL_TREE)
7852 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7855 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7856 if (flag_unsafe_math_optimizations)
7858 const REAL_VALUE_TYPE dconstroot
7859 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7861 if (REAL_VALUES_EQUAL (c, dconstroot))
7863 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7864 if (cbrtfn != NULL_TREE)
7865 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7869 /* Check for an integer exponent. */
/* Round-trip the exponent through an integer; real_identical is true
   exactly when C has an integral value.  */
7870 n = real_to_integer (&c);
7871 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7872 if (real_identical (&c, &cint))
7874 /* Attempt to evaluate pow at compile-time, unless this should
7875 raise an exception. */
/* pow(0, negative) may trap or set errno, so only fold that case
   when neither -ftrapping-math nor -fmath-errno is active.  */
7876 if (TREE_CODE (arg0) == REAL_CST
7877 && !TREE_OVERFLOW (arg0)
7878 && (n > 0
7879 || (!flag_trapping_math && !flag_errno_math)
7880 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7882 REAL_VALUE_TYPE x;
7883 bool inexact;
7885 x = TREE_REAL_CST (arg0);
7886 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7887 if (flag_unsafe_math_optimizations || !inexact)
7888 return build_real (type, x);
7891 /* Strip sign ops from even integer powers. */
7892 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7894 tree narg0 = fold_strip_sign_ops (arg0);
7895 if (narg0)
7896 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
/* Transforms keyed on the shape of the base, all unsafe-math only.  */
7901 if (flag_unsafe_math_optimizations)
7903 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7905 /* Optimize pow(expN(x),y) = expN(x*y). */
7906 if (BUILTIN_EXPONENT_P (fcode))
7908 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7909 tree arg = CALL_EXPR_ARG (arg0, 0);
7910 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7911 return build_call_expr_loc (loc, expfn, 1, arg);
7914 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7915 if (BUILTIN_SQRT_P (fcode))
7917 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7918 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7919 build_real (type, dconsthalf));
7920 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7923 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7924 if (BUILTIN_CBRT_P (fcode))
7926 tree arg = CALL_EXPR_ARG (arg0, 0);
7927 if (tree_expr_nonnegative_p (arg))
7929 const REAL_VALUE_TYPE dconstroot
7930 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7931 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7932 build_real (type, dconstroot));
7933 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7937 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7938 if (fcode == BUILT_IN_POW
7939 || fcode == BUILT_IN_POWF
7940 || fcode == BUILT_IN_POWL)
7942 tree arg00 = CALL_EXPR_ARG (arg0, 0)
7943 if (tree_expr_nonnegative_p (arg00))
7945 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7946 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7947 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7952 return NULL_TREE;
7955 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7956 Return NULL_TREE if no simplification can be made. */
7957 static tree
7958 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7959 tree arg0, tree arg1, tree type)
7961 if (!validate_arg (arg0, REAL_TYPE)
7962 || !validate_arg (arg1, INTEGER_TYPE))
7963 return NULL_TREE;
7965 /* Optimize pow(1.0,y) = 1.0. */
7966 if (real_onep (arg0))
7967 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7969 if (host_integerp (arg1, 0))
7971 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7973 /* Evaluate powi at compile-time. */
7974 if (TREE_CODE (arg0) == REAL_CST
7975 && !TREE_OVERFLOW (arg0))
7977 REAL_VALUE_TYPE x;
7978 x = TREE_REAL_CST (arg0);
7979 real_powi (&x, TYPE_MODE (type), &x, c);
7980 return build_real (type, x);
7983 /* Optimize pow(x,0) = 1.0. */
7984 if (c == 0)
7985 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7986 arg0);
7988 /* Optimize pow(x,1) = x. */
7989 if (c == 1)
7990 return arg0;
7992 /* Optimize pow(x,-1) = 1.0/x. */
7993 if (c == -1)
7994 return fold_build2_loc (loc, RDIV_EXPR, type,
7995 build_real (type, dconst1), arg0);
7998 return NULL_TREE;
8001 /* A subroutine of fold_builtin to fold the various exponent
8002 functions. Return NULL_TREE if no simplification can be made.
8003 FUNC is the corresponding MPFR exponent function. */
8005 static tree
8006 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8007 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8009 if (validate_arg (arg, REAL_TYPE))
8011 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8012 tree res;
8014 /* Calculate the result when the argument is a constant. */
8015 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8016 return res;
8018 /* Optimize expN(logN(x)) = x. */
8019 if (flag_unsafe_math_optimizations)
8021 const enum built_in_function fcode = builtin_mathfn_code (arg);
/* Which exponential this is is identified by the MPFR evaluator
   FUNC; match it against the corresponding log builtins.  */
8023 if ((func == mpfr_exp
8024 && (fcode == BUILT_IN_LOG
8025 || fcode == BUILT_IN_LOGF
8026 || fcode == BUILT_IN_LOGL))
8027 || (func == mpfr_exp2
8028 && (fcode == BUILT_IN_LOG2
8029 || fcode == BUILT_IN_LOG2F
8030 || fcode == BUILT_IN_LOG2L))
8031 || (func == mpfr_exp10
8032 && (fcode == BUILT_IN_LOG10
8033 || fcode == BUILT_IN_LOG10F
8034 || fcode == BUILT_IN_LOG10L)))
8035 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8039 return NULL_TREE;
8042 /* Return true if VAR is a VAR_DECL or a component thereof. */
8044 static bool
8045 var_decl_component_p (tree var)
8047 tree inner = var;
8048 while (handled_component_p (inner))
8049 inner = TREE_OPERAND (inner, 0);
8050 return SSA_VAR_P (inner);
8053 /* Fold function call to builtin memset. Return
8054 NULL_TREE if no simplification can be made. */
8056 static tree
8057 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8058 tree type, bool ignore)
8060 tree var, ret, etype;
8061 unsigned HOST_WIDE_INT length, cval;
8063 if (! validate_arg (dest, POINTER_TYPE)
8064 || ! validate_arg (c, INTEGER_TYPE)
8065 || ! validate_arg (len, INTEGER_TYPE))
8066 return NULL_TREE;
/* The transform needs a compile-time constant length.  */
8068 if (! host_integerp (len, 1))
8069 return NULL_TREE;
8071 /* If the LEN parameter is zero, return DEST. */
8072 if (integer_zerop (len))
8073 return omit_one_operand_loc (loc, type, dest, c)
8075 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8076 return NULL_TREE;
/* Only handle a destination that is the address of a non-volatile
   object (or component) so the store can be done as one assignment.  */
8078 var = dest;
8079 STRIP_NOPS (var);
8080 if (TREE_CODE (var) != ADDR_EXPR)
8081 return NULL_TREE;
8083 var = TREE_OPERAND (var, 0);
8084 if (TREE_THIS_VOLATILE (var))
8085 return NULL_TREE;
8087 etype = TREE_TYPE (var);
8088 if (TREE_CODE (etype) == ARRAY_TYPE)
8089 etype = TREE_TYPE (etype);
8091 if (!INTEGRAL_TYPE_P (etype)
8092 && !POINTER_TYPE_P (etype))
8093 return NULL_TREE;
8095 if (! var_decl_component_p (var))
8096 return NULL_TREE;
/* The length must cover the element type exactly and the destination
   must be sufficiently aligned for a single store of that size.  */
8098 length = tree_low_cst (len, 1);
8099 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8100 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8101 < (int) length
8102 return NULL_TREE;
8104 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8105 return NULL_TREE;
8107 if (integer_zerop (c))
8108 cval = 0;
8109 else
8111 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8112 return NULL_TREE;
/* Replicate the low byte of C across the whole word; the final
   (cval << 31) << 1 shifts by 32 without risking an out-of-range
   single shift when the host wide int is only 32 bits wide.  */
8114 cval = tree_low_cst (c, 1);
8115 cval &= 0xff;
8116 cval |= cval << 8;
8117 cval |= cval << 16;
8118 cval |= (cval << 31) << 1;
/* Emit *(etype *)dest = cval, keeping DEST's value as the result
   unless the caller ignores it.  */
8121 ret = build_int_cst_type (etype, cval);
8122 var = build_fold_indirect_ref_loc (loc,
8123 fold_convert_loc (loc,
8124 build_pointer_type (etype),
8125 dest));
8126 ret = build2 (MODIFY_EXPR, etype, var, ret);
8127 if (ignore)
8128 return ret;
8130 return omit_one_operand_loc (loc, type, dest, ret);
8133 /* Fold function call to builtin memset. Return
8134 NULL_TREE if no simplification can be made. */
8136 static tree
8137 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8139 if (! validate_arg (dest, POINTER_TYPE)
8140 || ! validate_arg (size, INTEGER_TYPE))
8141 return NULL_TREE;
8143 if (!ignore)
8144 return NULL_TREE;
8146 /* New argument list transforming bzero(ptr x, int y) to
8147 memset(ptr x, int 0, size_t y). This is done this way
8148 so that if it isn't expanded inline, we fallback to
8149 calling bzero instead of memset. */
8151 return fold_builtin_memset (loc, dest, integer_zero_node,
8152 fold_convert_loc (loc, sizetype, size),
8153 void_type_node, ignore);
8156 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8157 NULL_TREE if no simplification can be made.
8158 If ENDP is 0, return DEST (like memcpy).
8159 If ENDP is 1, return DEST+LEN (like mempcpy).
8160 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8161 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8162 (memmove). */
8164 static tree
8165 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8166 tree len, tree type, bool ignore, int endp)
8168 tree destvar, srcvar, expr;
8170 if (! validate_arg (dest, POINTER_TYPE)
8171 || ! validate_arg (src, POINTER_TYPE)
8172 || ! validate_arg (len, INTEGER_TYPE))
8173 return NULL_TREE;
8175 /* If the LEN parameter is zero, return DEST. */
8176 if (integer_zerop (len))
8177 return omit_one_operand_loc (loc, type, dest, src);
8179 /* If SRC and DEST are the same (and not volatile), return
8180 DEST{,+LEN,+LEN-1}. */
8181 if (operand_equal_p (src, dest, 0))
8182 expr = len;
8183 else
8185 tree srctype, desttype;
8186 int src_align, dest_align;
/* memmove (ENDP == 3): overlap is allowed, so we may only rewrite it
   as memcpy when the copy is provably safe -- read-only source, a
   length covered by the common alignment, or disjoint ranges.  */
8188 if (endp == 3)
8190 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8191 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8193 /* Both DEST and SRC must be pointer types.
8194 ??? This is what old code did. Is the testing for pointer types
8195 really mandatory?
8197 If either SRC is readonly or length is 1, we can use memcpy. */
8198 if (!dest_align || !src_align)
8199 return NULL_TREE;
8200 if (readonly_data_expr (src)
8201 || (host_integerp (len, 1)
8202 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8203 >= tree_low_cst (len, 1))))
8205 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8206 if (!fn)
8207 return NULL_TREE;
8208 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8211 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8212 srcvar = build_fold_indirect_ref_loc (loc, src);
8213 destvar = build_fold_indirect_ref_loc (loc, dest);
8214 if (srcvar
8215 && !TREE_THIS_VOLATILE (srcvar)
8216 && destvar
8217 && !TREE_THIS_VOLATILE (destvar))
8219 tree src_base, dest_base, fn;
8220 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8221 HOST_WIDE_INT size = -1;
8222 HOST_WIDE_INT maxsize = -1;
/* Determine the base object and bit offset of each access so the
   ranges can be compared for overlap below.  */
8224 src_base = srcvar;
8225 if (handled_component_p (src_base))
8226 src_base = get_ref_base_and_extent (src_base, &src_offset,
8227 &size, &maxsize);
8228 dest_base = destvar;
8229 if (handled_component_p (dest_base))
8230 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8231 &size, &maxsize);
/* Convert a constant LEN from bytes to bits, guarding against
   overflow of the HOST_WIDE_INT multiplication; -1 means unknown.  */
8232 if (host_integerp (len, 1))
8234 maxsize = tree_low_cst (len, 1);
8235 if (maxsize
8236 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8237 maxsize = -1;
8238 else
8239 maxsize *= BITS_PER_UNIT;
8241 else
8242 maxsize = -1;
/* Two decls: disjoint unless it is the same decl with overlapping
   ranges.  Two indirections: only safe when based on the same
   pointer with non-overlapping ranges.  Anything else: give up.  */
8243 if (SSA_VAR_P (src_base)
8244 && SSA_VAR_P (dest_base))
8246 if (operand_equal_p (src_base, dest_base, 0)
8247 && ranges_overlap_p (src_offset, maxsize,
8248 dest_offset, maxsize))
8249 return NULL_TREE;
8251 else if (TREE_CODE (src_base) == INDIRECT_REF
8252 && TREE_CODE (dest_base) == INDIRECT_REF)
8254 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8255 TREE_OPERAND (dest_base, 0), 0)
8256 || ranges_overlap_p (src_offset, maxsize,
8257 dest_offset, maxsize))
8258 return NULL_TREE;
8260 else
8261 return NULL_TREE;
8263 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8264 if (!fn)
8265 return NULL_TREE;
8266 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8268 return NULL_TREE;
/* memcpy/mempcpy/stpcpy path: try to turn the call into a single
   element assignment *destvar = *srcvar when LEN matches the size of
   the pointed-to type.  */
8271 if (!host_integerp (len, 0))
8272 return NULL_TREE;
8273 /* FIXME:
8274 This logic lose for arguments like (type *)malloc (sizeof (type)),
8275 since we strip the casts of up to VOID return value from malloc.
8276 Perhaps we ought to inherit type from non-VOID argument here? */
8277 STRIP_NOPS (src);
8278 STRIP_NOPS (dest);
8279 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8280 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8282 tree tem = TREE_OPERAND (src, 0);
8283 STRIP_NOPS (tem);
8284 if (tem != TREE_OPERAND (src, 0))
8285 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8287 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8289 tree tem = TREE_OPERAND (dest, 0);
8290 STRIP_NOPS (tem);
8291 if (tem != TREE_OPERAND (dest, 0))
8292 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* If the pointed-to type is an array whose size differs from LEN,
   retry with the element type instead.  */
8294 srctype = TREE_TYPE (TREE_TYPE (src));
8295 if (srctype
8296 && TREE_CODE (srctype) == ARRAY_TYPE
8297 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8299 srctype = TREE_TYPE (srctype);
8300 STRIP_NOPS (src);
8301 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8303 desttype = TREE_TYPE (TREE_TYPE (dest));
8304 if (desttype
8305 && TREE_CODE (desttype) == ARRAY_TYPE
8306 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8308 desttype = TREE_TYPE (desttype);
8309 STRIP_NOPS (dest);
8310 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
/* Both types must have known constant size and be non-volatile.  */
8312 if (!srctype || !desttype
8313 || !TYPE_SIZE_UNIT (srctype)
8314 || !TYPE_SIZE_UNIT (desttype)
8315 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8316 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8317 || TYPE_VOLATILE (srctype)
8318 || TYPE_VOLATILE (desttype))
8319 return NULL_TREE;
8321 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8322 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8323 if (dest_align < (int) TYPE_ALIGN (desttype)
8324 || src_align < (int) TYPE_ALIGN (srctype))
8325 return NULL_TREE;
/* DEST is also used to build the return value; avoid expanding it
   twice when the result is needed.  */
8327 if (!ignore)
8328 dest = builtin_save_expr (dest);
/* SRCVAR/DESTVAR become the operands of the assignment; each must
   exactly cover LEN bytes and not defeat strict aliasing.  */
8330 srcvar = NULL_TREE;
8331 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8333 srcvar = build_fold_indirect_ref_loc (loc, src);
8334 if (TREE_THIS_VOLATILE (srcvar))
8335 return NULL_TREE;
8336 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8337 srcvar = NULL_TREE;
8338 /* With memcpy, it is possible to bypass aliasing rules, so without
8339 this check i.e. execute/20060930-2.c would be misoptimized,
8340 because it use conflicting alias set to hold argument for the
8341 memcpy call. This check is probably unnecessary with
8342 -fno-strict-aliasing. Similarly for destvar. See also
8343 PR29286. */
8344 else if (!var_decl_component_p (srcvar))
8345 srcvar = NULL_TREE;
8348 destvar = NULL_TREE;
8349 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8351 destvar = build_fold_indirect_ref_loc (loc, dest);
8352 if (TREE_THIS_VOLATILE (destvar))
8353 return NULL_TREE;
8354 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8355 destvar = NULL_TREE;
8356 else if (!var_decl_component_p (destvar))
8357 destvar = NULL_TREE;
8360 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8361 return NULL_TREE;
/* When only one side qualified, re-access the other side through the
   qualified side's type, building a packed, alignment-reduced
   variant of the type if the pointer is under-aligned.  */
8363 if (srcvar == NULL_TREE)
8365 tree srcptype;
8366 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8367 return NULL_TREE;
8369 srctype = build_qualified_type (desttype, 0);
8370 if (src_align < (int) TYPE_ALIGN (srctype))
8372 if (AGGREGATE_TYPE_P (srctype)
8373 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8374 return NULL_TREE;
8376 srctype = build_variant_type_copy (srctype);
8377 TYPE_ALIGN (srctype) = src_align;
8378 TYPE_USER_ALIGN (srctype) = 1;
8379 TYPE_PACKED (srctype) = 1;
8381 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8382 src = fold_convert_loc (loc, srcptype, src);
8383 srcvar = build_fold_indirect_ref_loc (loc, src);
8385 else if (destvar == NULL_TREE)
8387 tree destptype;
8388 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8389 return NULL_TREE;
8391 desttype = build_qualified_type (srctype, 0);
8392 if (dest_align < (int) TYPE_ALIGN (desttype))
8394 if (AGGREGATE_TYPE_P (desttype)
8395 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8396 return NULL_TREE;
8398 desttype = build_variant_type_copy (desttype);
8399 TYPE_ALIGN (desttype) = dest_align;
8400 TYPE_USER_ALIGN (desttype) = 1;
8401 TYPE_PACKED (desttype) = 1;
8403 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8404 dest = fold_convert_loc (loc, destptype, dest);
8405 destvar = build_fold_indirect_ref_loc (loc, dest);
/* Pick the assignment form: direct, scalar conversion, or a
   VIEW_CONVERT_EXPR reinterpretation of the bits.  */
8408 if (srctype == desttype
8409 || (gimple_in_ssa_p (cfun)
8410 && useless_type_conversion_p (desttype, srctype)))
8411 expr = srcvar;
8412 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8413 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8414 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8415 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8416 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8417 else
8418 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8419 TREE_TYPE (destvar), srcvar);
8420 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
/* Shape the return value according to ENDP (see function comment).  */
8423 if (ignore)
8424 return expr;
8426 if (endp == 0 || endp == 3)
8427 return omit_one_operand_loc (loc, type, dest, expr);
8429 if (expr == len)
8430 expr = NULL_TREE;
8432 if (endp == 2)
8433 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8434 ssize_int (1));
8436 len = fold_convert_loc (loc, sizetype, len);
8437 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8438 dest = fold_convert_loc (loc, type, dest);
8439 if (expr)
8440 dest = omit_one_operand_loc (loc, type, dest, expr);
8441 return dest;
8444 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8445 If LEN is not NULL, it represents the length of the string to be
8446 copied. Return NULL_TREE if no simplification can be made. */
8448 tree
8449 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8451 tree fn;
8453 if (!validate_arg (dest, POINTER_TYPE)
8454 || !validate_arg (src, POINTER_TYPE))
8455 return NULL_TREE;
8457 /* If SRC and DEST are the same (and not volatile), return DEST. */
8458 if (operand_equal_p (src, dest, 0))
8459 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8461 if (optimize_function_for_size_p (cfun))
8462 return NULL_TREE;
8464 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8465 if (!fn)
8466 return NULL_TREE;
8468 if (!len)
8470 len = c_strlen (src, 1);
8471 if (! len || TREE_SIDE_EFFECTS (len))
8472 return NULL_TREE;
8475 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8476 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8477 build_call_expr_loc (loc, fn, 3, dest, src, len));
8480 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8481 Return NULL_TREE if no simplification can be made. */
8483 static tree
8484 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8486 tree fn, len, lenp1, call, type;
8488 if (!validate_arg (dest, POINTER_TYPE)
8489 || !validate_arg (src, POINTER_TYPE))
8490 return NULL_TREE;
8492 len = c_strlen (src, 1);
8493 if (!len
8494 || TREE_CODE (len) != INTEGER_CST)
8495 return NULL_TREE;
8497 if (optimize_function_for_size_p (cfun)
8498 /* If length is zero it's small enough. */
8499 && !integer_zerop (len))
8500 return NULL_TREE;
8502 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8503 if (!fn)
8504 return NULL_TREE;
8506 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8507 /* We use dest twice in building our expression. Save it from
8508 multiple expansions. */
8509 dest = builtin_save_expr (dest);
8510 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8512 type = TREE_TYPE (TREE_TYPE (fndecl));
8513 len = fold_convert_loc (loc, sizetype, len);
8514 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8515 dest = fold_convert_loc (loc, type, dest);
8516 dest = omit_one_operand_loc (loc, type, dest, call);
8517 return dest;
8520 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8521 If SLEN is not NULL, it represents the length of the source string.
8522 Return NULL_TREE if no simplification can be made. */
8524 tree
8525 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8526 tree src, tree len, tree slen)
8528 tree fn;
8530 if (!validate_arg (dest, POINTER_TYPE)
8531 || !validate_arg (src, POINTER_TYPE)
8532 || !validate_arg (len, INTEGER_TYPE))
8533 return NULL_TREE;
8535 /* If the LEN parameter is zero, return DEST. */
8536 if (integer_zerop (len))
8537 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8539 /* We can't compare slen with len as constants below if len is not a
8540 constant. */
8541 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8542 return NULL_TREE;
8544 if (!slen)
8545 slen = c_strlen (src, 1);
8547 /* Now, we must be passed a constant src ptr parameter. */
8548 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8549 return NULL_TREE;
8551 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8553 /* We do not support simplification of this case, though we do
8554 support it when expanding trees into RTL. */
8555 /* FIXME: generate a call to __builtin_memset. */
8556 if (tree_int_cst_lt (slen, len))
8557 return NULL_TREE;
8559 /* OK transform into builtin memcpy. */
8560 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8561 if (!fn)
8562 return NULL_TREE;
8563 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8564 build_call_expr_loc (loc, fn, 3, dest, src, len));
8567 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8568 arguments to the call, and TYPE is its return type.
8569 Return NULL_TREE if no simplification can be made. */
8571 static tree
8572 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8574 if (!validate_arg (arg1, POINTER_TYPE)
8575 || !validate_arg (arg2, INTEGER_TYPE)
8576 || !validate_arg (len, INTEGER_TYPE))
8577 return NULL_TREE;
8578 else
8580 const char *p1;
8582 if (TREE_CODE (arg2) != INTEGER_CST
8583 || !host_integerp (len, 1))
8584 return NULL_TREE;
8586 p1 = c_getstr (arg1);
8587 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8589 char c;
8590 const char *r;
8591 tree tem;
8593 if (target_char_cast (arg2, &c))
8594 return NULL_TREE;
8596 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8598 if (r == NULL)
8599 return build_int_cst (TREE_TYPE (arg1), 0);
8601 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8602 size_int (r - p1));
8603 return fold_convert_loc (loc, type, tem);
8605 return NULL_TREE;
8609 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8610 Return NULL_TREE if no simplification can be made. */
8612 static tree
8613 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8615 const char *p1, *p2;
8617 if (!validate_arg (arg1, POINTER_TYPE)
8618 || !validate_arg (arg2, POINTER_TYPE)
8619 || !validate_arg (len, INTEGER_TYPE))
8620 return NULL_TREE;
8622 /* If the LEN parameter is zero, return zero. */
8623 if (integer_zerop (len))
8624 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8625 arg1, arg2);
8627 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8628 if (operand_equal_p (arg1, arg2, 0))
8629 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8631 p1 = c_getstr (arg1);
8632 p2 = c_getstr (arg2);
8634 /* If all arguments are constant, and the value of len is not greater
8635 than the lengths of arg1 and arg2, evaluate at compile-time. */
8636 if (host_integerp (len, 1) && p1 && p2
8637 && compare_tree_int (len, strlen (p1) + 1) <= 0
8638 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8640 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8642 if (r > 0)
8643 return integer_one_node;
8644 else if (r < 0)
8645 return integer_minus_one_node;
8646 else
8647 return integer_zero_node;
8650 /* If len parameter is one, return an expression corresponding to
8651 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8652 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8654 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8655 tree cst_uchar_ptr_node
8656 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8658 tree ind1
8659 = fold_convert_loc (loc, integer_type_node,
8660 build1 (INDIRECT_REF, cst_uchar_node,
8661 fold_convert_loc (loc,
8662 cst_uchar_ptr_node,
8663 arg1)));
8664 tree ind2
8665 = fold_convert_loc (loc, integer_type_node,
8666 build1 (INDIRECT_REF, cst_uchar_node,
8667 fold_convert_loc (loc,
8668 cst_uchar_ptr_node,
8669 arg2)));
8670 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8673 return NULL_TREE;
8676 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8677 Return NULL_TREE if no simplification can be made. */
8679 static tree
8680 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8682 const char *p1, *p2;
8684 if (!validate_arg (arg1, POINTER_TYPE)
8685 || !validate_arg (arg2, POINTER_TYPE))
8686 return NULL_TREE;
8688 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8689 if (operand_equal_p (arg1, arg2, 0))
8690 return integer_zero_node;
8692 p1 = c_getstr (arg1);
8693 p2 = c_getstr (arg2);
8695 if (p1 && p2)
8697 const int i = strcmp (p1, p2);
8698 if (i < 0)
8699 return integer_minus_one_node;
8700 else if (i > 0)
8701 return integer_one_node;
8702 else
8703 return integer_zero_node;
8706 /* If the second arg is "", return *(const unsigned char*)arg1. */
8707 if (p2 && *p2 == '\0')
8709 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8710 tree cst_uchar_ptr_node
8711 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8713 return fold_convert_loc (loc, integer_type_node,
8714 build1 (INDIRECT_REF, cst_uchar_node,
8715 fold_convert_loc (loc,
8716 cst_uchar_ptr_node,
8717 arg1)));
8720 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8721 if (p1 && *p1 == '\0')
8723 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8724 tree cst_uchar_ptr_node
8725 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8727 tree temp
8728 = fold_convert_loc (loc, integer_type_node,
8729 build1 (INDIRECT_REF, cst_uchar_node,
8730 fold_convert_loc (loc,
8731 cst_uchar_ptr_node,
8732 arg2)));
8733 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8736 return NULL_TREE;
8739 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8740 Return NULL_TREE if no simplification can be made. */
8742 static tree
8743 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8745 const char *p1, *p2;
8747 if (!validate_arg (arg1, POINTER_TYPE)
8748 || !validate_arg (arg2, POINTER_TYPE)
8749 || !validate_arg (len, INTEGER_TYPE))
8750 return NULL_TREE;
8752 /* If the LEN parameter is zero, return zero. */
8753 if (integer_zerop (len))
8754 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8755 arg1, arg2);
8757 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8758 if (operand_equal_p (arg1, arg2, 0))
8759 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8761 p1 = c_getstr (arg1);
8762 p2 = c_getstr (arg2);
8764 if (host_integerp (len, 1) && p1 && p2)
8766 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8767 if (i > 0)
8768 return integer_one_node;
8769 else if (i < 0)
8770 return integer_minus_one_node;
8771 else
8772 return integer_zero_node;
8775 /* If the second arg is "", and the length is greater than zero,
8776 return *(const unsigned char*)arg1. */
8777 if (p2 && *p2 == '\0'
8778 && TREE_CODE (len) == INTEGER_CST
8779 && tree_int_cst_sgn (len) == 1)
8781 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8782 tree cst_uchar_ptr_node
8783 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8785 return fold_convert_loc (loc, integer_type_node,
8786 build1 (INDIRECT_REF, cst_uchar_node,
8787 fold_convert_loc (loc,
8788 cst_uchar_ptr_node,
8789 arg1)));
8792 /* If the first arg is "", and the length is greater than zero,
8793 return -*(const unsigned char*)arg2. */
8794 if (p1 && *p1 == '\0'
8795 && TREE_CODE (len) == INTEGER_CST
8796 && tree_int_cst_sgn (len) == 1)
8798 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8799 tree cst_uchar_ptr_node
8800 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8802 tree temp = fold_convert_loc (loc, integer_type_node,
8803 build1 (INDIRECT_REF, cst_uchar_node,
8804 fold_convert_loc (loc,
8805 cst_uchar_ptr_node,
8806 arg2)));
8807 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8810 /* If len parameter is one, return an expression corresponding to
8811 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8812 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8814 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8815 tree cst_uchar_ptr_node
8816 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8818 tree ind1 = fold_convert_loc (loc, integer_type_node,
8819 build1 (INDIRECT_REF, cst_uchar_node,
8820 fold_convert_loc (loc,
8821 cst_uchar_ptr_node,
8822 arg1)));
8823 tree ind2 = fold_convert_loc (loc, integer_type_node,
8824 build1 (INDIRECT_REF, cst_uchar_node,
8825 fold_convert_loc (loc,
8826 cst_uchar_ptr_node,
8827 arg2)));
8828 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8831 return NULL_TREE;
8834 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8835 ARG. Return NULL_TREE if no simplification can be made. */
8837 static tree
8838 fold_builtin_signbit (location_t loc, tree arg, tree type)
8840 tree temp;
8842 if (!validate_arg (arg, REAL_TYPE))
8843 return NULL_TREE;
8845 /* If ARG is a compile-time constant, determine the result. */
8846 if (TREE_CODE (arg) == REAL_CST
8847 && !TREE_OVERFLOW (arg))
8849 REAL_VALUE_TYPE c;
8851 c = TREE_REAL_CST (arg);
8852 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8853 return fold_convert_loc (loc, type, temp);
8856 /* If ARG is non-negative, the result is always zero. */
8857 if (tree_expr_nonnegative_p (arg))
8858 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8860 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8861 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8862 return fold_build2_loc (loc, LT_EXPR, type, arg,
8863 build_real (TREE_TYPE (arg), dconst0));
8865 return NULL_TREE;
8868 /* Fold function call to builtin copysign, copysignf or copysignl with
8869 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8870 be made. */
8872 static tree
8873 fold_builtin_copysign (location_t loc, tree fndecl,
8874 tree arg1, tree arg2, tree type)
8876 tree tem;
8878 if (!validate_arg (arg1, REAL_TYPE)
8879 || !validate_arg (arg2, REAL_TYPE))
8880 return NULL_TREE;
8882 /* copysign(X,X) is X. */
8883 if (operand_equal_p (arg1, arg2, 0))
8884 return fold_convert_loc (loc, type, arg1);
8886 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8887 if (TREE_CODE (arg1) == REAL_CST
8888 && TREE_CODE (arg2) == REAL_CST
8889 && !TREE_OVERFLOW (arg1)
8890 && !TREE_OVERFLOW (arg2))
8892 REAL_VALUE_TYPE c1, c2;
8894 c1 = TREE_REAL_CST (arg1);
8895 c2 = TREE_REAL_CST (arg2);
8896 /* c1.sign := c2.sign. */
8897 real_copysign (&c1, &c2);
8898 return build_real (type, c1);
8901 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8902 Remember to evaluate Y for side-effects. */
8903 if (tree_expr_nonnegative_p (arg2))
8904 return omit_one_operand_loc (loc, type,
8905 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8906 arg2);
8908 /* Strip sign changing operations for the first argument. */
8909 tem = fold_strip_sign_ops (arg1);
8910 if (tem)
8911 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8913 return NULL_TREE;
8916 /* Fold a call to builtin isascii with argument ARG. */
8918 static tree
8919 fold_builtin_isascii (location_t loc, tree arg)
8921 if (!validate_arg (arg, INTEGER_TYPE))
8922 return NULL_TREE;
8923 else
8925 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8926 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8927 build_int_cst (NULL_TREE,
8928 ~ (unsigned HOST_WIDE_INT) 0x7f));
8929 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8930 arg, integer_zero_node);
8934 /* Fold a call to builtin toascii with argument ARG. */
8936 static tree
8937 fold_builtin_toascii (location_t loc, tree arg)
8939 if (!validate_arg (arg, INTEGER_TYPE))
8940 return NULL_TREE;
8942 /* Transform toascii(c) -> (c & 0x7f). */
8943 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8944 build_int_cst (NULL_TREE, 0x7f));
8947 /* Fold a call to builtin isdigit with argument ARG. */
8949 static tree
8950 fold_builtin_isdigit (location_t loc, tree arg)
8952 if (!validate_arg (arg, INTEGER_TYPE))
8953 return NULL_TREE;
8954 else
8956 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8957 /* According to the C standard, isdigit is unaffected by locale.
8958 However, it definitely is affected by the target character set. */
8959 unsigned HOST_WIDE_INT target_digit0
8960 = lang_hooks.to_target_charset ('0');
8962 if (target_digit0 == 0)
8963 return NULL_TREE;
8965 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8966 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8967 build_int_cst (unsigned_type_node, target_digit0));
8968 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8969 build_int_cst (unsigned_type_node, 9));
8973 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8975 static tree
8976 fold_builtin_fabs (location_t loc, tree arg, tree type)
8978 if (!validate_arg (arg, REAL_TYPE))
8979 return NULL_TREE;
8981 arg = fold_convert_loc (loc, type, arg);
8982 if (TREE_CODE (arg) == REAL_CST)
8983 return fold_abs_const (arg, type);
8984 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8987 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8989 static tree
8990 fold_builtin_abs (location_t loc, tree arg, tree type)
8992 if (!validate_arg (arg, INTEGER_TYPE))
8993 return NULL_TREE;
8995 arg = fold_convert_loc (loc, type, arg);
8996 if (TREE_CODE (arg) == INTEGER_CST)
8997 return fold_abs_const (arg, type);
8998 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9001 /* Fold a call to builtin fmin or fmax. */
9003 static tree
9004 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9005 tree type, bool max)
9007 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9009 /* Calculate the result when the argument is a constant. */
9010 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9012 if (res)
9013 return res;
9015 /* If either argument is NaN, return the other one. Avoid the
9016 transformation if we get (and honor) a signalling NaN. Using
9017 omit_one_operand() ensures we create a non-lvalue. */
9018 if (TREE_CODE (arg0) == REAL_CST
9019 && real_isnan (&TREE_REAL_CST (arg0))
9020 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9021 || ! TREE_REAL_CST (arg0).signalling))
9022 return omit_one_operand_loc (loc, type, arg1, arg0);
9023 if (TREE_CODE (arg1) == REAL_CST
9024 && real_isnan (&TREE_REAL_CST (arg1))
9025 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9026 || ! TREE_REAL_CST (arg1).signalling))
9027 return omit_one_operand_loc (loc, type, arg0, arg1);
9029 /* Transform fmin/fmax(x,x) -> x. */
9030 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9031 return omit_one_operand_loc (loc, type, arg0, arg1);
9033 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9034 functions to return the numeric arg if the other one is NaN.
9035 These tree codes don't honor that, so only transform if
9036 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9037 handled, so we don't have to worry about it either. */
9038 if (flag_finite_math_only)
9039 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9040 fold_convert_loc (loc, type, arg0),
9041 fold_convert_loc (loc, type, arg1));
9043 return NULL_TREE;
9046 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9048 static tree
9049 fold_builtin_carg (location_t loc, tree arg, tree type)
9051 if (validate_arg (arg, COMPLEX_TYPE)
9052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9054 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9056 if (atan2_fn)
9058 tree new_arg = builtin_save_expr (arg);
9059 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9060 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9061 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9065 return NULL_TREE;
9068 /* Fold a call to builtin logb/ilogb. */
9070 static tree
9071 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9073 if (! validate_arg (arg, REAL_TYPE))
9074 return NULL_TREE;
9076 STRIP_NOPS (arg);
9078 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9080 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9082 switch (value->cl)
9084 case rvc_nan:
9085 case rvc_inf:
9086 /* If arg is Inf or NaN and we're logb, return it. */
9087 if (TREE_CODE (rettype) == REAL_TYPE)
9088 return fold_convert_loc (loc, rettype, arg);
9089 /* Fall through... */
9090 case rvc_zero:
9091 /* Zero may set errno and/or raise an exception for logb, also
9092 for ilogb we don't know FP_ILOGB0. */
9093 return NULL_TREE;
9094 case rvc_normal:
9095 /* For normal numbers, proceed iff radix == 2. In GCC,
9096 normalized significands are in the range [0.5, 1.0). We
9097 want the exponent as if they were [1.0, 2.0) so get the
9098 exponent and subtract 1. */
9099 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9100 return fold_convert_loc (loc, rettype,
9101 build_int_cst (NULL_TREE,
9102 REAL_EXP (value)-1));
9103 break;
9107 return NULL_TREE;
9110 /* Fold a call to builtin significand, if radix == 2. */
9112 static tree
9113 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9115 if (! validate_arg (arg, REAL_TYPE))
9116 return NULL_TREE;
9118 STRIP_NOPS (arg);
9120 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9122 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9124 switch (value->cl)
9126 case rvc_zero:
9127 case rvc_nan:
9128 case rvc_inf:
9129 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9130 return fold_convert_loc (loc, rettype, arg);
9131 case rvc_normal:
9132 /* For normal numbers, proceed iff radix == 2. */
9133 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9135 REAL_VALUE_TYPE result = *value;
9136 /* In GCC, normalized significands are in the range [0.5,
9137 1.0). We want them to be [1.0, 2.0) so set the
9138 exponent to 1. */
9139 SET_REAL_EXP (&result, 1);
9140 return build_real (rettype, result);
9142 break;
9146 return NULL_TREE;
9149 /* Fold a call to builtin frexp, we can assume the base is 2. */
9151 static tree
9152 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9154 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9155 return NULL_TREE;
9157 STRIP_NOPS (arg0);
9159 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9160 return NULL_TREE;
9162 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9164 /* Proceed if a valid pointer type was passed in. */
9165 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9167 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9168 tree frac, exp;
9170 switch (value->cl)
9172 case rvc_zero:
9173 /* For +-0, return (*exp = 0, +-0). */
9174 exp = integer_zero_node;
9175 frac = arg0;
9176 break;
9177 case rvc_nan:
9178 case rvc_inf:
9179 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9180 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9181 case rvc_normal:
9183 /* Since the frexp function always expects base 2, and in
9184 GCC normalized significands are already in the range
9185 [0.5, 1.0), we have exactly what frexp wants. */
9186 REAL_VALUE_TYPE frac_rvt = *value;
9187 SET_REAL_EXP (&frac_rvt, 0);
9188 frac = build_real (rettype, frac_rvt);
9189 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9191 break;
9192 default:
9193 gcc_unreachable ();
9196 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9197 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9198 TREE_SIDE_EFFECTS (arg1) = 1;
9199 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9202 return NULL_TREE;
9205 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9206 then we can assume the base is two. If it's false, then we have to
9207 check the mode of the TYPE parameter in certain cases. */
9209 static tree
9210 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9211 tree type, bool ldexp)
9213 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9215 STRIP_NOPS (arg0);
9216 STRIP_NOPS (arg1);
9218 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9219 if (real_zerop (arg0) || integer_zerop (arg1)
9220 || (TREE_CODE (arg0) == REAL_CST
9221 && !real_isfinite (&TREE_REAL_CST (arg0))))
9222 return omit_one_operand_loc (loc, type, arg0, arg1);
9224 /* If both arguments are constant, then try to evaluate it. */
9225 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9226 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9227 && host_integerp (arg1, 0))
9229 /* Bound the maximum adjustment to twice the range of the
9230 mode's valid exponents. Use abs to ensure the range is
9231 positive as a sanity check. */
9232 const long max_exp_adj = 2 *
9233 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9234 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9236 /* Get the user-requested adjustment. */
9237 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9239 /* The requested adjustment must be inside this range. This
9240 is a preliminary cap to avoid things like overflow, we
9241 may still fail to compute the result for other reasons. */
9242 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9244 REAL_VALUE_TYPE initial_result;
9246 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9248 /* Ensure we didn't overflow. */
9249 if (! real_isinf (&initial_result))
9251 const REAL_VALUE_TYPE trunc_result
9252 = real_value_truncate (TYPE_MODE (type), initial_result);
9254 /* Only proceed if the target mode can hold the
9255 resulting value. */
9256 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9257 return build_real (type, trunc_result);
9263 return NULL_TREE;
9266 /* Fold a call to builtin modf. */
9268 static tree
9269 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9271 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9272 return NULL_TREE;
9274 STRIP_NOPS (arg0);
9276 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9277 return NULL_TREE;
9279 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9281 /* Proceed if a valid pointer type was passed in. */
9282 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9284 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9285 REAL_VALUE_TYPE trunc, frac;
9287 switch (value->cl)
9289 case rvc_nan:
9290 case rvc_zero:
9291 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9292 trunc = frac = *value;
9293 break;
9294 case rvc_inf:
9295 /* For +-Inf, return (*arg1 = arg0, +-0). */
9296 frac = dconst0;
9297 frac.sign = value->sign;
9298 trunc = *value;
9299 break;
9300 case rvc_normal:
9301 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9302 real_trunc (&trunc, VOIDmode, value);
9303 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9304 /* If the original number was negative and already
9305 integral, then the fractional part is -0.0. */
9306 if (value->sign && frac.cl == rvc_zero)
9307 frac.sign = value->sign;
9308 break;
9311 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9312 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9313 build_real (rettype, trunc));
9314 TREE_SIDE_EFFECTS (arg1) = 1;
9315 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9316 build_real (rettype, frac));
9319 return NULL_TREE;
9322 /* Given a location LOC, an interclass builtin function decl FNDECL
9323 and its single argument ARG, return an folded expression computing
9324 the same, or NULL_TREE if we either couldn't or didn't want to fold
9325 (the latter happen if there's an RTL instruction available). */
9327 static tree
9328 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9330 enum machine_mode mode;
9332 if (!validate_arg (arg, REAL_TYPE))
9333 return NULL_TREE;
9335 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9336 return NULL_TREE;
9338 mode = TYPE_MODE (TREE_TYPE (arg));
9340 /* If there is no optab, try generic code. */
9341 switch (DECL_FUNCTION_CODE (fndecl))
9343 tree result;
9345 CASE_FLT_FN (BUILT_IN_ISINF):
9347 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9348 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9349 tree const type = TREE_TYPE (arg);
9350 REAL_VALUE_TYPE r;
9351 char buf[128];
9353 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9354 real_from_string (&r, buf);
9355 result = build_call_expr (isgr_fn, 2,
9356 fold_build1_loc (loc, ABS_EXPR, type, arg),
9357 build_real (type, r));
9358 return result;
9360 CASE_FLT_FN (BUILT_IN_FINITE):
9361 case BUILT_IN_ISFINITE:
9363 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9364 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9365 tree const type = TREE_TYPE (arg);
9366 REAL_VALUE_TYPE r;
9367 char buf[128];
9369 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9370 real_from_string (&r, buf);
9371 result = build_call_expr (isle_fn, 2,
9372 fold_build1_loc (loc, ABS_EXPR, type, arg),
9373 build_real (type, r));
9374 /*result = fold_build2_loc (loc, UNGT_EXPR,
9375 TREE_TYPE (TREE_TYPE (fndecl)),
9376 fold_build1_loc (loc, ABS_EXPR, type, arg),
9377 build_real (type, r));
9378 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9379 TREE_TYPE (TREE_TYPE (fndecl)),
9380 result);*/
9381 return result;
9383 case BUILT_IN_ISNORMAL:
9385 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9386 islessequal(fabs(x),DBL_MAX). */
9387 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9388 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9389 tree const type = TREE_TYPE (arg);
9390 REAL_VALUE_TYPE rmax, rmin;
9391 char buf[128];
9393 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9394 real_from_string (&rmax, buf);
9395 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9396 real_from_string (&rmin, buf);
9397 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9398 result = build_call_expr (isle_fn, 2, arg,
9399 build_real (type, rmax));
9400 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9401 build_call_expr (isge_fn, 2, arg,
9402 build_real (type, rmin)));
9403 return result;
9405 default:
9406 break;
9409 return NULL_TREE;
9412 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9413 ARG is the argument for the call. */
9415 static tree
9416 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9418 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9419 REAL_VALUE_TYPE r;
9421 if (!validate_arg (arg, REAL_TYPE))
9422 return NULL_TREE;
9424 switch (builtin_index)
9426 case BUILT_IN_ISINF:
9427 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9428 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9430 if (TREE_CODE (arg) == REAL_CST)
9432 r = TREE_REAL_CST (arg);
9433 if (real_isinf (&r))
9434 return real_compare (GT_EXPR, &r, &dconst0)
9435 ? integer_one_node : integer_minus_one_node;
9436 else
9437 return integer_zero_node;
9440 return NULL_TREE;
9442 case BUILT_IN_ISINF_SIGN:
9444 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9445 /* In a boolean context, GCC will fold the inner COND_EXPR to
9446 1. So e.g. "if (isinf_sign(x))" would be folded to just
9447 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9448 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9449 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9450 tree tmp = NULL_TREE;
9452 arg = builtin_save_expr (arg);
9454 if (signbit_fn && isinf_fn)
9456 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9457 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9459 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9460 signbit_call, integer_zero_node);
9461 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9462 isinf_call, integer_zero_node);
9464 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9465 integer_minus_one_node, integer_one_node);
9466 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9467 isinf_call, tmp,
9468 integer_zero_node);
9471 return tmp;
9474 case BUILT_IN_ISFINITE:
9475 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9476 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9477 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9479 if (TREE_CODE (arg) == REAL_CST)
9481 r = TREE_REAL_CST (arg);
9482 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9485 return NULL_TREE;
9487 case BUILT_IN_ISNAN:
9488 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9489 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9491 if (TREE_CODE (arg) == REAL_CST)
9493 r = TREE_REAL_CST (arg);
9494 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9497 arg = builtin_save_expr (arg);
9498 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9500 default:
9501 gcc_unreachable ();
9505 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9506 This builtin will generate code to return the appropriate floating
9507 point classification depending on the value of the floating point
9508 number passed in. The possible return values must be supplied as
9509 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9510 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9511 one floating point argument which is "type generic". */
9513 static tree
9514 fold_builtin_fpclassify (location_t loc, tree exp)
9516 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9517 arg, type, res, tmp;
9518 enum machine_mode mode;
9519 REAL_VALUE_TYPE r;
9520 char buf[128];
9522 /* Verify the required arguments in the original call. */
9523 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9524 INTEGER_TYPE, INTEGER_TYPE,
9525 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9526 return NULL_TREE;
9528 fp_nan = CALL_EXPR_ARG (exp, 0);
9529 fp_infinite = CALL_EXPR_ARG (exp, 1);
9530 fp_normal = CALL_EXPR_ARG (exp, 2);
9531 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9532 fp_zero = CALL_EXPR_ARG (exp, 4);
9533 arg = CALL_EXPR_ARG (exp, 5);
9534 type = TREE_TYPE (arg);
9535 mode = TYPE_MODE (type);
9536 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9538 /* fpclassify(x) ->
9539 isnan(x) ? FP_NAN :
9540 (fabs(x) == Inf ? FP_INFINITE :
9541 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9542 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9544 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9545 build_real (type, dconst0));
9546 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9547 tmp, fp_zero, fp_subnormal);
9549 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9550 real_from_string (&r, buf);
9551 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9552 arg, build_real (type, r));
9553 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9555 if (HONOR_INFINITIES (mode))
9557 real_inf (&r);
9558 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9559 build_real (type, r));
9560 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9561 fp_infinite, res);
9564 if (HONOR_NANS (mode))
9566 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9567 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9570 return res;
9573 /* Fold a call to an unordered comparison function such as
9574 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9575 being called and ARG0 and ARG1 are the arguments for the call.
9576 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9577 the opposite of the desired result. UNORDERED_CODE is used
9578 for modes that can hold NaNs and ORDERED_CODE is used for
9579 the rest. */
9581 static tree
9582 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9583 enum tree_code unordered_code,
9584 enum tree_code ordered_code)
9586 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9587 enum tree_code code;
9588 tree type0, type1;
9589 enum tree_code code0, code1;
9590 tree cmp_type = NULL_TREE;
9592 type0 = TREE_TYPE (arg0);
9593 type1 = TREE_TYPE (arg1);
9595 code0 = TREE_CODE (type0);
9596 code1 = TREE_CODE (type1);
9598 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9599 /* Choose the wider of two real types. */
9600 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9601 ? type0 : type1;
9602 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9603 cmp_type = type0;
9604 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9605 cmp_type = type1;
9607 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9608 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9610 if (unordered_code == UNORDERED_EXPR)
9612 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9613 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9614 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9617 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9618 : ordered_code;
9619 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9620 fold_build2_loc (loc, code, type, arg0, arg1));
9623 /* Fold a call to built-in function FNDECL with 0 arguments.
9624 IGNORE is true if the result of the function call is ignored. This
9625 function returns NULL_TREE if no simplification was possible. */
9627 static tree
9628 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9630 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9631 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9632 switch (fcode)
9634 CASE_FLT_FN (BUILT_IN_INF):
9635 case BUILT_IN_INFD32:
9636 case BUILT_IN_INFD64:
9637 case BUILT_IN_INFD128:
9638 return fold_builtin_inf (loc, type, true);
9640 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9641 return fold_builtin_inf (loc, type, false);
9643 case BUILT_IN_CLASSIFY_TYPE:
9644 return fold_builtin_classify_type (NULL_TREE);
9646 default:
9647 break;
9649 return NULL_TREE;
9652 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9653 IGNORE is true if the result of the function call is ignored. This
9654 function returns NULL_TREE if no simplification was possible. */
9656 static tree
9657 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9660 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9661 switch (fcode)
9664 case BUILT_IN_CONSTANT_P:
9666 tree val = fold_builtin_constant_p (arg0);
9668 /* Gimplification will pull the CALL_EXPR for the builtin out of
9669 an if condition. When not optimizing, we'll not CSE it back.
9670 To avoid link error types of regressions, return false now. */
9671 if (!val && !optimize)
9672 val = integer_zero_node;
9674 return val;
9677 case BUILT_IN_CLASSIFY_TYPE:
9678 return fold_builtin_classify_type (arg0);
9680 case BUILT_IN_STRLEN:
9681 return fold_builtin_strlen (loc, type, arg0);
9683 CASE_FLT_FN (BUILT_IN_FABS):
9684 return fold_builtin_fabs (loc, arg0, type);
9686 case BUILT_IN_ABS:
9687 case BUILT_IN_LABS:
9688 case BUILT_IN_LLABS:
9689 case BUILT_IN_IMAXABS:
9690 return fold_builtin_abs (loc, arg0, type);
9692 CASE_FLT_FN (BUILT_IN_CONJ):
9693 if (validate_arg (arg0, COMPLEX_TYPE)
9694 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9695 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9696 break;
9698 CASE_FLT_FN (BUILT_IN_CREAL):
9699 if (validate_arg (arg0, COMPLEX_TYPE)
9700 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9701 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9702 break;
9704 CASE_FLT_FN (BUILT_IN_CIMAG):
9705 if (validate_arg (arg0, COMPLEX_TYPE)
9706 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9707 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9708 break;
9710 CASE_FLT_FN (BUILT_IN_CCOS):
9711 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9713 CASE_FLT_FN (BUILT_IN_CCOSH):
9714 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9716 CASE_FLT_FN (BUILT_IN_CSIN):
9717 if (validate_arg (arg0, COMPLEX_TYPE)
9718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9719 return do_mpc_arg1 (arg0, type, mpc_sin);
9720 break;
9722 CASE_FLT_FN (BUILT_IN_CSINH):
9723 if (validate_arg (arg0, COMPLEX_TYPE)
9724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9725 return do_mpc_arg1 (arg0, type, mpc_sinh);
9726 break;
9728 CASE_FLT_FN (BUILT_IN_CTAN):
9729 if (validate_arg (arg0, COMPLEX_TYPE)
9730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9731 return do_mpc_arg1 (arg0, type, mpc_tan);
9732 break;
9734 CASE_FLT_FN (BUILT_IN_CTANH):
9735 if (validate_arg (arg0, COMPLEX_TYPE)
9736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9737 return do_mpc_arg1 (arg0, type, mpc_tanh);
9738 break;
9740 CASE_FLT_FN (BUILT_IN_CLOG):
9741 if (validate_arg (arg0, COMPLEX_TYPE)
9742 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9743 return do_mpc_arg1 (arg0, type, mpc_log);
9744 break;
9746 CASE_FLT_FN (BUILT_IN_CSQRT):
9747 if (validate_arg (arg0, COMPLEX_TYPE)
9748 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9749 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9750 break;
9752 CASE_FLT_FN (BUILT_IN_CASIN):
9753 if (validate_arg (arg0, COMPLEX_TYPE)
9754 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9755 return do_mpc_arg1 (arg0, type, mpc_asin);
9756 break;
9758 CASE_FLT_FN (BUILT_IN_CACOS):
9759 if (validate_arg (arg0, COMPLEX_TYPE)
9760 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9761 return do_mpc_arg1 (arg0, type, mpc_acos);
9762 break;
9764 CASE_FLT_FN (BUILT_IN_CATAN):
9765 if (validate_arg (arg0, COMPLEX_TYPE)
9766 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9767 return do_mpc_arg1 (arg0, type, mpc_atan);
9768 break;
9770 CASE_FLT_FN (BUILT_IN_CASINH):
9771 if (validate_arg (arg0, COMPLEX_TYPE)
9772 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9773 return do_mpc_arg1 (arg0, type, mpc_asinh);
9774 break;
9776 CASE_FLT_FN (BUILT_IN_CACOSH):
9777 if (validate_arg (arg0, COMPLEX_TYPE)
9778 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9779 return do_mpc_arg1 (arg0, type, mpc_acosh);
9780 break;
9782 CASE_FLT_FN (BUILT_IN_CATANH):
9783 if (validate_arg (arg0, COMPLEX_TYPE)
9784 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9785 return do_mpc_arg1 (arg0, type, mpc_atanh);
9786 break;
9788 CASE_FLT_FN (BUILT_IN_CABS):
9789 return fold_builtin_cabs (loc, arg0, type, fndecl);
9791 CASE_FLT_FN (BUILT_IN_CARG):
9792 return fold_builtin_carg (loc, arg0, type);
9794 CASE_FLT_FN (BUILT_IN_SQRT):
9795 return fold_builtin_sqrt (loc, arg0, type);
9797 CASE_FLT_FN (BUILT_IN_CBRT):
9798 return fold_builtin_cbrt (loc, arg0, type);
9800 CASE_FLT_FN (BUILT_IN_ASIN):
9801 if (validate_arg (arg0, REAL_TYPE))
9802 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9803 &dconstm1, &dconst1, true);
9804 break;
9806 CASE_FLT_FN (BUILT_IN_ACOS):
9807 if (validate_arg (arg0, REAL_TYPE))
9808 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9809 &dconstm1, &dconst1, true);
9810 break;
9812 CASE_FLT_FN (BUILT_IN_ATAN):
9813 if (validate_arg (arg0, REAL_TYPE))
9814 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9815 break;
9817 CASE_FLT_FN (BUILT_IN_ASINH):
9818 if (validate_arg (arg0, REAL_TYPE))
9819 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9820 break;
9822 CASE_FLT_FN (BUILT_IN_ACOSH):
9823 if (validate_arg (arg0, REAL_TYPE))
9824 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9825 &dconst1, NULL, true);
9826 break;
9828 CASE_FLT_FN (BUILT_IN_ATANH):
9829 if (validate_arg (arg0, REAL_TYPE))
9830 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9831 &dconstm1, &dconst1, false);
9832 break;
9834 CASE_FLT_FN (BUILT_IN_SIN):
9835 if (validate_arg (arg0, REAL_TYPE))
9836 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9837 break;
9839 CASE_FLT_FN (BUILT_IN_COS):
9840 return fold_builtin_cos (loc, arg0, type, fndecl);
9842 CASE_FLT_FN (BUILT_IN_TAN):
9843 return fold_builtin_tan (arg0, type);
9845 CASE_FLT_FN (BUILT_IN_CEXP):
9846 return fold_builtin_cexp (loc, arg0, type);
9848 CASE_FLT_FN (BUILT_IN_CEXPI):
9849 if (validate_arg (arg0, REAL_TYPE))
9850 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9851 break;
9853 CASE_FLT_FN (BUILT_IN_SINH):
9854 if (validate_arg (arg0, REAL_TYPE))
9855 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9856 break;
9858 CASE_FLT_FN (BUILT_IN_COSH):
9859 return fold_builtin_cosh (loc, arg0, type, fndecl);
9861 CASE_FLT_FN (BUILT_IN_TANH):
9862 if (validate_arg (arg0, REAL_TYPE))
9863 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9864 break;
9866 CASE_FLT_FN (BUILT_IN_ERF):
9867 if (validate_arg (arg0, REAL_TYPE))
9868 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9869 break;
9871 CASE_FLT_FN (BUILT_IN_ERFC):
9872 if (validate_arg (arg0, REAL_TYPE))
9873 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9874 break;
9876 CASE_FLT_FN (BUILT_IN_TGAMMA):
9877 if (validate_arg (arg0, REAL_TYPE))
9878 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9879 break;
9881 CASE_FLT_FN (BUILT_IN_EXP):
9882 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9884 CASE_FLT_FN (BUILT_IN_EXP2):
9885 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9887 CASE_FLT_FN (BUILT_IN_EXP10):
9888 CASE_FLT_FN (BUILT_IN_POW10):
9889 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9891 CASE_FLT_FN (BUILT_IN_EXPM1):
9892 if (validate_arg (arg0, REAL_TYPE))
9893 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9894 break;
9896 CASE_FLT_FN (BUILT_IN_LOG):
9897 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9899 CASE_FLT_FN (BUILT_IN_LOG2):
9900 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9902 CASE_FLT_FN (BUILT_IN_LOG10):
9903 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9905 CASE_FLT_FN (BUILT_IN_LOG1P):
9906 if (validate_arg (arg0, REAL_TYPE))
9907 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9908 &dconstm1, NULL, false);
9909 break;
9911 CASE_FLT_FN (BUILT_IN_J0):
9912 if (validate_arg (arg0, REAL_TYPE))
9913 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9914 NULL, NULL, 0);
9915 break;
9917 CASE_FLT_FN (BUILT_IN_J1):
9918 if (validate_arg (arg0, REAL_TYPE))
9919 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9920 NULL, NULL, 0);
9921 break;
9923 CASE_FLT_FN (BUILT_IN_Y0):
9924 if (validate_arg (arg0, REAL_TYPE))
9925 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9926 &dconst0, NULL, false);
9927 break;
9929 CASE_FLT_FN (BUILT_IN_Y1):
9930 if (validate_arg (arg0, REAL_TYPE))
9931 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9932 &dconst0, NULL, false);
9933 break;
9935 CASE_FLT_FN (BUILT_IN_NAN):
9936 case BUILT_IN_NAND32:
9937 case BUILT_IN_NAND64:
9938 case BUILT_IN_NAND128:
9939 return fold_builtin_nan (arg0, type, true);
9941 CASE_FLT_FN (BUILT_IN_NANS):
9942 return fold_builtin_nan (arg0, type, false);
9944 CASE_FLT_FN (BUILT_IN_FLOOR):
9945 return fold_builtin_floor (loc, fndecl, arg0);
9947 CASE_FLT_FN (BUILT_IN_CEIL):
9948 return fold_builtin_ceil (loc, fndecl, arg0);
9950 CASE_FLT_FN (BUILT_IN_TRUNC):
9951 return fold_builtin_trunc (loc, fndecl, arg0);
9953 CASE_FLT_FN (BUILT_IN_ROUND):
9954 return fold_builtin_round (loc, fndecl, arg0);
9956 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9957 CASE_FLT_FN (BUILT_IN_RINT):
9958 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9960 CASE_FLT_FN (BUILT_IN_LCEIL):
9961 CASE_FLT_FN (BUILT_IN_LLCEIL):
9962 CASE_FLT_FN (BUILT_IN_LFLOOR):
9963 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9964 CASE_FLT_FN (BUILT_IN_LROUND):
9965 CASE_FLT_FN (BUILT_IN_LLROUND):
9966 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9968 CASE_FLT_FN (BUILT_IN_LRINT):
9969 CASE_FLT_FN (BUILT_IN_LLRINT):
9970 return fold_fixed_mathfn (loc, fndecl, arg0);
9972 case BUILT_IN_BSWAP32:
9973 case BUILT_IN_BSWAP64:
9974 return fold_builtin_bswap (fndecl, arg0);
9976 CASE_INT_FN (BUILT_IN_FFS):
9977 CASE_INT_FN (BUILT_IN_CLZ):
9978 CASE_INT_FN (BUILT_IN_CTZ):
9979 CASE_INT_FN (BUILT_IN_POPCOUNT):
9980 CASE_INT_FN (BUILT_IN_PARITY):
9981 return fold_builtin_bitop (fndecl, arg0);
9983 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9984 return fold_builtin_signbit (loc, arg0, type);
9986 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9987 return fold_builtin_significand (loc, arg0, type);
9989 CASE_FLT_FN (BUILT_IN_ILOGB):
9990 CASE_FLT_FN (BUILT_IN_LOGB):
9991 return fold_builtin_logb (loc, arg0, type);
9993 case BUILT_IN_ISASCII:
9994 return fold_builtin_isascii (loc, arg0);
9996 case BUILT_IN_TOASCII:
9997 return fold_builtin_toascii (loc, arg0);
9999 case BUILT_IN_ISDIGIT:
10000 return fold_builtin_isdigit (loc, arg0);
10002 CASE_FLT_FN (BUILT_IN_FINITE):
10003 case BUILT_IN_FINITED32:
10004 case BUILT_IN_FINITED64:
10005 case BUILT_IN_FINITED128:
10006 case BUILT_IN_ISFINITE:
10008 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10009 if (ret)
10010 return ret;
10011 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10014 CASE_FLT_FN (BUILT_IN_ISINF):
10015 case BUILT_IN_ISINFD32:
10016 case BUILT_IN_ISINFD64:
10017 case BUILT_IN_ISINFD128:
10019 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10020 if (ret)
10021 return ret;
10022 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10025 case BUILT_IN_ISNORMAL:
10026 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10028 case BUILT_IN_ISINF_SIGN:
10029 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10031 CASE_FLT_FN (BUILT_IN_ISNAN):
10032 case BUILT_IN_ISNAND32:
10033 case BUILT_IN_ISNAND64:
10034 case BUILT_IN_ISNAND128:
10035 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10037 case BUILT_IN_PRINTF:
10038 case BUILT_IN_PRINTF_UNLOCKED:
10039 case BUILT_IN_VPRINTF:
10040 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10042 default:
10043 break;
10046 return NULL_TREE;
10050 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10051 IGNORE is true if the result of the function call is ignored. This
10052 function returns NULL_TREE if no simplification was possible. */
10054 static tree
10055 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10057 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10058 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10060 switch (fcode)
10062 CASE_FLT_FN (BUILT_IN_JN):
10063 if (validate_arg (arg0, INTEGER_TYPE)
10064 && validate_arg (arg1, REAL_TYPE))
10065 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10066 break;
10068 CASE_FLT_FN (BUILT_IN_YN):
10069 if (validate_arg (arg0, INTEGER_TYPE)
10070 && validate_arg (arg1, REAL_TYPE))
10071 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10072 &dconst0, false);
10073 break;
10075 CASE_FLT_FN (BUILT_IN_DREM):
10076 CASE_FLT_FN (BUILT_IN_REMAINDER):
10077 if (validate_arg (arg0, REAL_TYPE)
10078 && validate_arg(arg1, REAL_TYPE))
10079 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10080 break;
10082 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10083 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10084 if (validate_arg (arg0, REAL_TYPE)
10085 && validate_arg(arg1, POINTER_TYPE))
10086 return do_mpfr_lgamma_r (arg0, arg1, type);
10087 break;
10089 CASE_FLT_FN (BUILT_IN_ATAN2):
10090 if (validate_arg (arg0, REAL_TYPE)
10091 && validate_arg(arg1, REAL_TYPE))
10092 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10093 break;
10095 CASE_FLT_FN (BUILT_IN_FDIM):
10096 if (validate_arg (arg0, REAL_TYPE)
10097 && validate_arg(arg1, REAL_TYPE))
10098 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10099 break;
10101 CASE_FLT_FN (BUILT_IN_HYPOT):
10102 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10104 CASE_FLT_FN (BUILT_IN_CPOW):
10105 if (validate_arg (arg0, COMPLEX_TYPE)
10106 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10107 && validate_arg (arg1, COMPLEX_TYPE)
10108 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10109 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10110 break;
10112 CASE_FLT_FN (BUILT_IN_LDEXP):
10113 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10114 CASE_FLT_FN (BUILT_IN_SCALBN):
10115 CASE_FLT_FN (BUILT_IN_SCALBLN):
10116 return fold_builtin_load_exponent (loc, arg0, arg1,
10117 type, /*ldexp=*/false);
10119 CASE_FLT_FN (BUILT_IN_FREXP):
10120 return fold_builtin_frexp (loc, arg0, arg1, type);
10122 CASE_FLT_FN (BUILT_IN_MODF):
10123 return fold_builtin_modf (loc, arg0, arg1, type);
10125 case BUILT_IN_BZERO:
10126 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10128 case BUILT_IN_FPUTS:
10129 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10131 case BUILT_IN_FPUTS_UNLOCKED:
10132 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10134 case BUILT_IN_STRSTR:
10135 return fold_builtin_strstr (loc, arg0, arg1, type);
10137 case BUILT_IN_STRCAT:
10138 return fold_builtin_strcat (loc, arg0, arg1);
10140 case BUILT_IN_STRSPN:
10141 return fold_builtin_strspn (loc, arg0, arg1);
10143 case BUILT_IN_STRCSPN:
10144 return fold_builtin_strcspn (loc, arg0, arg1);
10146 case BUILT_IN_STRCHR:
10147 case BUILT_IN_INDEX:
10148 return fold_builtin_strchr (loc, arg0, arg1, type);
10150 case BUILT_IN_STRRCHR:
10151 case BUILT_IN_RINDEX:
10152 return fold_builtin_strrchr (loc, arg0, arg1, type);
10154 case BUILT_IN_STRCPY:
10155 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10157 case BUILT_IN_STPCPY:
10158 if (ignore)
10160 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10161 if (!fn)
10162 break;
10164 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10166 else
10167 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10168 break;
10170 case BUILT_IN_STRCMP:
10171 return fold_builtin_strcmp (loc, arg0, arg1);
10173 case BUILT_IN_STRPBRK:
10174 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10176 case BUILT_IN_EXPECT:
10177 return fold_builtin_expect (loc, arg0, arg1);
10179 CASE_FLT_FN (BUILT_IN_POW):
10180 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10182 CASE_FLT_FN (BUILT_IN_POWI):
10183 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10185 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10186 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10188 CASE_FLT_FN (BUILT_IN_FMIN):
10189 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10191 CASE_FLT_FN (BUILT_IN_FMAX):
10192 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10194 case BUILT_IN_ISGREATER:
10195 return fold_builtin_unordered_cmp (loc, fndecl,
10196 arg0, arg1, UNLE_EXPR, LE_EXPR);
10197 case BUILT_IN_ISGREATEREQUAL:
10198 return fold_builtin_unordered_cmp (loc, fndecl,
10199 arg0, arg1, UNLT_EXPR, LT_EXPR);
10200 case BUILT_IN_ISLESS:
10201 return fold_builtin_unordered_cmp (loc, fndecl,
10202 arg0, arg1, UNGE_EXPR, GE_EXPR);
10203 case BUILT_IN_ISLESSEQUAL:
10204 return fold_builtin_unordered_cmp (loc, fndecl,
10205 arg0, arg1, UNGT_EXPR, GT_EXPR);
10206 case BUILT_IN_ISLESSGREATER:
10207 return fold_builtin_unordered_cmp (loc, fndecl,
10208 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10209 case BUILT_IN_ISUNORDERED:
10210 return fold_builtin_unordered_cmp (loc, fndecl,
10211 arg0, arg1, UNORDERED_EXPR,
10212 NOP_EXPR);
10214 /* We do the folding for va_start in the expander. */
10215 case BUILT_IN_VA_START:
10216 break;
10218 case BUILT_IN_SPRINTF:
10219 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10221 case BUILT_IN_OBJECT_SIZE:
10222 return fold_builtin_object_size (arg0, arg1);
10224 case BUILT_IN_PRINTF:
10225 case BUILT_IN_PRINTF_UNLOCKED:
10226 case BUILT_IN_VPRINTF:
10227 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10229 case BUILT_IN_PRINTF_CHK:
10230 case BUILT_IN_VPRINTF_CHK:
10231 if (!validate_arg (arg0, INTEGER_TYPE)
10232 || TREE_SIDE_EFFECTS (arg0))
10233 return NULL_TREE;
10234 else
10235 return fold_builtin_printf (loc, fndecl,
10236 arg1, NULL_TREE, ignore, fcode);
10237 break;
10239 case BUILT_IN_FPRINTF:
10240 case BUILT_IN_FPRINTF_UNLOCKED:
10241 case BUILT_IN_VFPRINTF:
10242 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10243 ignore, fcode);
10245 default:
10246 break;
10248 return NULL_TREE;
10251 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10252 and ARG2. IGNORE is true if the result of the function call is ignored.
10253 This function returns NULL_TREE if no simplification was possible. */
10255 static tree
10256 fold_builtin_3 (location_t loc, tree fndecl,
10257 tree arg0, tree arg1, tree arg2, bool ignore)
10259 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10260 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10261 switch (fcode)
10264 CASE_FLT_FN (BUILT_IN_SINCOS):
10265 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10267 CASE_FLT_FN (BUILT_IN_FMA):
10268 if (validate_arg (arg0, REAL_TYPE)
10269 && validate_arg(arg1, REAL_TYPE)
10270 && validate_arg(arg2, REAL_TYPE))
10271 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10272 break;
10274 CASE_FLT_FN (BUILT_IN_REMQUO):
10275 if (validate_arg (arg0, REAL_TYPE)
10276 && validate_arg(arg1, REAL_TYPE)
10277 && validate_arg(arg2, POINTER_TYPE))
10278 return do_mpfr_remquo (arg0, arg1, arg2);
10279 break;
10281 case BUILT_IN_MEMSET:
10282 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10284 case BUILT_IN_BCOPY:
10285 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10286 void_type_node, true, /*endp=*/3);
10288 case BUILT_IN_MEMCPY:
10289 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10290 type, ignore, /*endp=*/0);
10292 case BUILT_IN_MEMPCPY:
10293 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10294 type, ignore, /*endp=*/1);
10296 case BUILT_IN_MEMMOVE:
10297 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10298 type, ignore, /*endp=*/3);
10300 case BUILT_IN_STRNCAT:
10301 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10303 case BUILT_IN_STRNCPY:
10304 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10306 case BUILT_IN_STRNCMP:
10307 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10309 case BUILT_IN_MEMCHR:
10310 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10312 case BUILT_IN_BCMP:
10313 case BUILT_IN_MEMCMP:
10314 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10316 case BUILT_IN_SPRINTF:
10317 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10319 case BUILT_IN_STRCPY_CHK:
10320 case BUILT_IN_STPCPY_CHK:
10321 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10322 ignore, fcode);
10324 case BUILT_IN_STRCAT_CHK:
10325 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10327 case BUILT_IN_PRINTF_CHK:
10328 case BUILT_IN_VPRINTF_CHK:
10329 if (!validate_arg (arg0, INTEGER_TYPE)
10330 || TREE_SIDE_EFFECTS (arg0))
10331 return NULL_TREE;
10332 else
10333 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10334 break;
10336 case BUILT_IN_FPRINTF:
10337 case BUILT_IN_FPRINTF_UNLOCKED:
10338 case BUILT_IN_VFPRINTF:
10339 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10340 ignore, fcode);
10342 case BUILT_IN_FPRINTF_CHK:
10343 case BUILT_IN_VFPRINTF_CHK:
10344 if (!validate_arg (arg1, INTEGER_TYPE)
10345 || TREE_SIDE_EFFECTS (arg1))
10346 return NULL_TREE;
10347 else
10348 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10349 ignore, fcode);
10351 default:
10352 break;
10354 return NULL_TREE;
10357 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10358 ARG2, and ARG3. IGNORE is true if the result of the function call is
10359 ignored. This function returns NULL_TREE if no simplification was
10360 possible. */
10362 static tree
10363 fold_builtin_4 (location_t loc, tree fndecl,
10364 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10366 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10368 switch (fcode)
10370 case BUILT_IN_MEMCPY_CHK:
10371 case BUILT_IN_MEMPCPY_CHK:
10372 case BUILT_IN_MEMMOVE_CHK:
10373 case BUILT_IN_MEMSET_CHK:
10374 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10375 NULL_TREE, ignore,
10376 DECL_FUNCTION_CODE (fndecl));
10378 case BUILT_IN_STRNCPY_CHK:
10379 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10381 case BUILT_IN_STRNCAT_CHK:
10382 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10384 case BUILT_IN_FPRINTF_CHK:
10385 case BUILT_IN_VFPRINTF_CHK:
10386 if (!validate_arg (arg1, INTEGER_TYPE)
10387 || TREE_SIDE_EFFECTS (arg1))
10388 return NULL_TREE;
10389 else
10390 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10391 ignore, fcode);
10392 break;
10394 default:
10395 break;
10397 return NULL_TREE;
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  /* Dispatch on the argument count to the matching fixed-arity helper.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the result in a NOP_EXPR with warnings suppressed: the
	 call node is removed earlier than any warning on it would be
	 emitted, which could otherwise trigger spurious diagnostics
	 such as "statement with no effect".  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10446 /* Builtins with folding operations that operate on "..." arguments
10447 need special handling; we need to store the arguments in a convenient
10448 data structure before attempting any folding. Fortunately there are
10449 only a few builtins that fall into this category. FNDECL is the
10450 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10451 result of the function call is ignored. */
10453 static tree
10454 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10455 bool ignore ATTRIBUTE_UNUSED)
10457 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10458 tree ret = NULL_TREE;
10460 switch (fcode)
10462 case BUILT_IN_SPRINTF_CHK:
10463 case BUILT_IN_VSPRINTF_CHK:
10464 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10465 break;
10467 case BUILT_IN_SNPRINTF_CHK:
10468 case BUILT_IN_VSNPRINTF_CHK:
10469 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10470 break;
10472 case BUILT_IN_FPCLASSIFY:
10473 ret = fold_builtin_fpclassify (loc, exp);
10474 break;
10476 default:
10477 break;
10479 if (ret)
10481 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10482 SET_EXPR_LOCATION (ret, loc);
10483 TREE_NO_WARNING (ret) = 1;
10484 return ret;
10486 return NULL_TREE;
10489 /* Return true if FNDECL shouldn't be folded right now.
10490 If a built-in function has an inline attribute always_inline
10491 wrapper, defer folding it after always_inline functions have
10492 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10493 might not be performed. */
10495 static bool
10496 avoid_folding_inline_builtin (tree fndecl)
10498 return (DECL_DECLARED_INLINE_P (fndecl)
10499 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10500 && cfun
10501 && !cfun->always_inline_functions_inlined
10502 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Folding an always_inline wrapper too early would defeat e.g.
	 the -D_FORTIFY_SOURCE checks done in the wrapper body.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Try the fixed-arity folders first; fall back to the varargs
	     folders if they did not simplify the call.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10559 /* Conveniently construct a function call expression. FNDECL names the
10560 function to be called and ARGLIST is a TREE_LIST of arguments. */
10562 tree
10563 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10565 tree fntype = TREE_TYPE (fndecl);
10566 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10567 int n = list_length (arglist);
10568 tree *argarray = (tree *) alloca (n * sizeof (tree));
10569 int i;
10571 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10572 argarray[i] = TREE_VALUE (arglist);
10573 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *argarray = (tree *) alloca (n * sizeof (tree));
  int i;

  /* Gather the N variadic tree arguments into a flat array, then let
     fold_builtin_call_array attempt folding before consing the call.  */
  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
}
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* The target hook still takes a TREE_LIST; build it in
		 reverse so consing restores the original order.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  /* FN is not the address of a builtin: just build the call.  */
  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Collect the N new leading arguments, then append the surviving
	 tail of EXP's arguments after them.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument vector, no
       copy needed.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10690 /* Validate a single argument ARG against a tree code CODE representing
10691 a type. */
10693 static bool
10694 validate_arg (const_tree arg, enum tree_code code)
10696 if (!arg)
10697 return false;
10698 else if (code == POINTER_TYPE)
10699 return POINTER_TYPE_P (TREE_TYPE (arg));
10700 else if (code == INTEGER_TYPE)
10701 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10702 return code == TREE_CODE (TREE_TYPE (arg));
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* Each variadic specifier is passed as an int-promoted tree code.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      /* Each variadic specifier is passed as an int-promoted tree code.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.  Targets
   without machine-specific builtins install this as their expander hook;
   returning NULL_RTX tells the caller no expansion happened.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
10823 /* Returns true is EXP represents data that would potentially reside
10824 in a readonly section. */
10826 static bool
10827 readonly_data_expr (tree exp)
10829 STRIP_NOPS (exp);
10831 if (TREE_CODE (exp) != ADDR_EXPR)
10832 return false;
10834 exp = get_base_address (TREE_OPERAND (exp, 0));
10835 if (!exp)
10836 return false;
10838 /* Make sure we call decl_readonly_section only for trees it
10839 can handle (since it returns true for everything it doesn't
10840 understand). */
10841 if (TREE_CODE (exp) == STRING_CST
10842 || TREE_CODE (exp) == CONSTRUCTOR
10843 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10844 return decl_readonly_section (exp, 0);
10845 else
10846 return false;
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* Nothing can be done unless the needle is a known constant.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: do the search at compile time.  */
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      /* strstr (s1, "") == s1.  The argument is const char *, and the
	 result is char *, so we need a type conversion here to avoid a
	 warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      /* Only a single-character needle can become strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The searched-for character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the INTEGER_CST to a host char, bailing out if it
	     does not fit the target character set.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The searched-for character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the INTEGER_CST to a host char, bailing out if it
	     does not fit the target character set.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      /* For a non-constant string, only the '\0' search can be
	 simplified (strchr and strrchr then find the same position).  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* Nothing can be done unless the accept set is a known constant.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings constant: do the search at compile time.  */
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node,
				     s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* Evaluate the strcpy, then yield DST as strcat's result.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
11225 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11226 to the call.
11228 Return NULL_TREE if no simplification was possible, otherwise return the
11229 simplified form of the call as a tree.
11231 The simplified form may be a constant or other expression which
11232 computes the same value, but in a more efficient manner (including
11233 calls to other builtin functions).
11235 The call may contain arguments which need to be evaluated, but
11236 which are not useful to determine the result of the call. In
11237 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11238 COMPOUND_EXPR will be an argument which must be evaluated.
11239 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11240 COMPOUND_EXPR in the chain will contain the tree for the simplified
11241 form of the builtin function call. */
11243 static tree
11244 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11246 if (!validate_arg (s1, POINTER_TYPE)
11247 || !validate_arg (s2, POINTER_TYPE))
11248 return NULL_TREE;
11249 else
11251 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11253 /* If both arguments are constants, evaluate at compile-time. */
11254 if (p1 && p2)
11256 const size_t r = strspn (p1, p2);
11257 return size_int (r);
11260 /* If either argument is "", return NULL_TREE. */
11261 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11262 /* Evaluate and ignore both arguments in case either one has
11263 side-effects. */
11264 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11265 s1, s2);
11266 return NULL_TREE;
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return size_int (r);
	}

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
11330 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11331 to the call. IGNORE is true if the value returned
11332 by the builtin will be ignored. UNLOCKED is true is true if this
11333 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11334 the known length of the string. Return NULL_TREE if no simplification
11335 was possible. */
11337 tree
11338 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11339 bool ignore, bool unlocked, tree len)
11341 /* If we're using an unlocked function, assume the other unlocked
11342 functions exist explicitly. */
11343 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11344 : implicit_built_in_decls[BUILT_IN_FPUTC];
11345 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11346 : implicit_built_in_decls[BUILT_IN_FWRITE];
11348 /* If the return value is used, don't do the transformation. */
11349 if (!ignore)
11350 return NULL_TREE;
11352 /* Verify the arguments in the original call. */
11353 if (!validate_arg (arg0, POINTER_TYPE)
11354 || !validate_arg (arg1, POINTER_TYPE))
11355 return NULL_TREE;
11357 if (! len)
11358 len = c_strlen (arg0, 0);
11360 /* Get the length of the string passed to fputs. If the length
11361 can't be determined, punt. */
11362 if (!len
11363 || TREE_CODE (len) != INTEGER_CST)
11364 return NULL_TREE;
11366 switch (compare_tree_int (len, 1))
11368 case -1: /* length is 0, delete the call entirely . */
11369 return omit_one_operand_loc (loc, integer_type_node,
11370 integer_zero_node, arg1);;
11372 case 0: /* length is 1, call fputc. */
11374 const char *p = c_getstr (arg0);
11376 if (p != NULL)
11378 if (fn_fputc)
11379 return build_call_expr_loc (loc, fn_fputc, 2,
11380 build_int_cst (NULL_TREE, p[0]), arg1);
11381 else
11382 return NULL_TREE;
11385 /* FALLTHROUGH */
11386 case 1: /* length is greater than 1, call fwrite. */
11388 /* If optimizing for size keep fputs. */
11389 if (optimize_function_for_size_p (cfun))
11390 return NULL_TREE;
11391 /* New argument list transforming fputs(string, stream) to
11392 fwrite(string, 1, len, stream). */
11393 if (fn_fwrite)
11394 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11395 size_one_node, len, arg1);
11396 else
11397 return NULL_TREE;
11399 default:
11400 gcc_unreachable ();
11402 return NULL_TREE;
11405 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11406 produced. False otherwise. This is done so that we don't output the error
11407 or warning twice or three times. */
11409 bool
11410 fold_builtin_next_arg (tree exp, bool va_start_p)
11412 tree fntype = TREE_TYPE (current_function_decl);
11413 int nargs = call_expr_nargs (exp);
11414 tree arg;
11416 if (TYPE_ARG_TYPES (fntype) == 0
11417 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11418 == void_type_node))
11420 error ("%<va_start%> used in function with fixed args");
11421 return true;
11424 if (va_start_p)
11426 if (va_start_p && (nargs != 2))
11428 error ("wrong number of arguments to function %<va_start%>");
11429 return true;
11431 arg = CALL_EXPR_ARG (exp, 1);
11433 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11434 when we checked the arguments and if needed issued a warning. */
11435 else
11437 if (nargs == 0)
11439 /* Evidently an out of date version of <stdarg.h>; can't validate
11440 va_start's second argument, but can still work as intended. */
11441 warning (0, "%<__builtin_next_arg%> called without an argument");
11442 return true;
11444 else if (nargs > 1)
11446 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11447 return true;
11449 arg = CALL_EXPR_ARG (exp, 0);
11452 if (TREE_CODE (arg) == SSA_NAME)
11453 arg = SSA_NAME_VAR (arg);
11455 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11456 or __builtin_next_arg (0) the first time we see it, after checking
11457 the arguments and if needed issuing a warning. */
11458 if (!integer_zerop (arg))
11460 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11462 /* Strip off all nops for the sake of the comparison. This
11463 is not quite the same as STRIP_NOPS. It does more.
11464 We must also strip off INDIRECT_EXPR for C++ reference
11465 parameters. */
11466 while (CONVERT_EXPR_P (arg)
11467 || TREE_CODE (arg) == INDIRECT_REF)
11468 arg = TREE_OPERAND (arg, 0);
11469 if (arg != last_parm)
11471 /* FIXME: Sometimes with the tree optimizers we can get the
11472 not the last argument even though the user used the last
11473 argument. We just warn and set the arg to be the last
11474 argument so that we will get wrong-code because of
11475 it. */
11476 warning (0, "second parameter of %<va_start%> not last named argument");
11479 /* Undefined by C99 7.15.1.4p4 (va_start):
11480 "If the parameter parmN is declared with the register storage
11481 class, with a function or array type, or with a type that is
11482 not compatible with the type that results after application of
11483 the default argument promotions, the behavior is undefined."
11485 else if (DECL_REGISTER (arg))
11486 warning (0, "undefined behaviour when second parameter of "
11487 "%<va_start%> is declared with %<register%> storage");
11489 /* We want to verify the second parameter just once before the tree
11490 optimizers are run and then avoid keeping it in the tree,
11491 as otherwise we could warn even for correct code like:
11492 void foo (int i, ...)
11493 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11494 if (va_start_p)
11495 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11496 else
11497 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11499 return false;
11503 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11504 ORIG may be null if this is a 2-argument call. We don't attempt to
11505 simplify calls with more than 3 arguments.
11507 Return NULL_TREE if no simplification was possible, otherwise return the
11508 simplified form of the call as a tree. If IGNORED is true, it means that
11509 the caller does not use the returned value of the function. */
11511 static tree
11512 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11513 tree orig, int ignored)
11515 tree call, retval;
11516 const char *fmt_str = NULL;
11518 /* Verify the required arguments in the original call. We deal with two
11519 types of sprintf() calls: 'sprintf (str, fmt)' and
11520 'sprintf (dest, "%s", orig)'. */
11521 if (!validate_arg (dest, POINTER_TYPE)
11522 || !validate_arg (fmt, POINTER_TYPE))
11523 return NULL_TREE;
11524 if (orig && !validate_arg (orig, POINTER_TYPE))
11525 return NULL_TREE;
11527 /* Check whether the format is a literal string constant. */
11528 fmt_str = c_getstr (fmt);
11529 if (fmt_str == NULL)
11530 return NULL_TREE;
11532 call = NULL_TREE;
11533 retval = NULL_TREE;
11535 if (!init_target_chars ())
11536 return NULL_TREE;
11538 /* If the format doesn't contain % args or %%, use strcpy. */
11539 if (strchr (fmt_str, target_percent) == NULL)
11541 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11543 if (!fn)
11544 return NULL_TREE;
11546 /* Don't optimize sprintf (buf, "abc", ptr++). */
11547 if (orig)
11548 return NULL_TREE;
11550 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11551 'format' is known to contain no % formats. */
11552 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11553 if (!ignored)
11554 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11557 /* If the format is "%s", use strcpy if the result isn't used. */
11558 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11560 tree fn;
11561 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11563 if (!fn)
11564 return NULL_TREE;
11566 /* Don't crash on sprintf (str1, "%s"). */
11567 if (!orig)
11568 return NULL_TREE;
11570 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11571 if (!ignored)
11573 retval = c_strlen (orig, 1);
11574 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11575 return NULL_TREE;
11577 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11580 if (call && retval)
11582 retval = fold_convert_loc
11583 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11584 retval);
11585 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11587 else
11588 return call;
11591 /* Expand a call EXP to __builtin_object_size. */
11594 expand_builtin_object_size (tree exp)
11596 tree ost;
11597 int object_size_type;
11598 tree fndecl = get_callee_fndecl (exp);
11600 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11602 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11603 exp, fndecl);
11604 expand_builtin_trap ();
11605 return const0_rtx;
11608 ost = CALL_EXPR_ARG (exp, 1);
11609 STRIP_NOPS (ost);
11611 if (TREE_CODE (ost) != INTEGER_CST
11612 || tree_int_cst_sgn (ost) < 0
11613 || compare_tree_int (ost, 3) > 0)
11615 error ("%Klast argument of %D is not integer constant between 0 and 3",
11616 exp, fndecl);
11617 expand_builtin_trap ();
11618 return const0_rtx;
11621 object_size_type = tree_low_cst (ost, 0);
11623 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11626 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11627 FCODE is the BUILT_IN_* to use.
11628 Return NULL_RTX if we failed; the caller should emit a normal call,
11629 otherwise try to get the result in TARGET, if convenient (and in
11630 mode MODE if that's convenient). */
11632 static rtx
11633 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11634 enum built_in_function fcode)
11636 tree dest, src, len, size;
11638 if (!validate_arglist (exp,
11639 POINTER_TYPE,
11640 fcode == BUILT_IN_MEMSET_CHK
11641 ? INTEGER_TYPE : POINTER_TYPE,
11642 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11643 return NULL_RTX;
11645 dest = CALL_EXPR_ARG (exp, 0);
11646 src = CALL_EXPR_ARG (exp, 1);
11647 len = CALL_EXPR_ARG (exp, 2);
11648 size = CALL_EXPR_ARG (exp, 3);
11650 if (! host_integerp (size, 1))
11651 return NULL_RTX;
11653 if (host_integerp (len, 1) || integer_all_onesp (size))
11655 tree fn;
11657 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11659 warning_at (tree_nonartificial_location (exp),
11660 0, "%Kcall to %D will always overflow destination buffer",
11661 exp, get_callee_fndecl (exp));
11662 return NULL_RTX;
11665 fn = NULL_TREE;
11666 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11667 mem{cpy,pcpy,move,set} is available. */
11668 switch (fcode)
11670 case BUILT_IN_MEMCPY_CHK:
11671 fn = built_in_decls[BUILT_IN_MEMCPY];
11672 break;
11673 case BUILT_IN_MEMPCPY_CHK:
11674 fn = built_in_decls[BUILT_IN_MEMPCPY];
11675 break;
11676 case BUILT_IN_MEMMOVE_CHK:
11677 fn = built_in_decls[BUILT_IN_MEMMOVE];
11678 break;
11679 case BUILT_IN_MEMSET_CHK:
11680 fn = built_in_decls[BUILT_IN_MEMSET];
11681 break;
11682 default:
11683 break;
11686 if (! fn)
11687 return NULL_RTX;
11689 fn = build_call_nofold (fn, 3, dest, src, len);
11690 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11691 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11692 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11694 else if (fcode == BUILT_IN_MEMSET_CHK)
11695 return NULL_RTX;
11696 else
11698 unsigned int dest_align
11699 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11701 /* If DEST is not a pointer type, call the normal function. */
11702 if (dest_align == 0)
11703 return NULL_RTX;
11705 /* If SRC and DEST are the same (and not volatile), do nothing. */
11706 if (operand_equal_p (src, dest, 0))
11708 tree expr;
11710 if (fcode != BUILT_IN_MEMPCPY_CHK)
11712 /* Evaluate and ignore LEN in case it has side-effects. */
11713 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11714 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11717 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11718 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11721 /* __memmove_chk special case. */
11722 if (fcode == BUILT_IN_MEMMOVE_CHK)
11724 unsigned int src_align
11725 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11727 if (src_align == 0)
11728 return NULL_RTX;
11730 /* If src is categorized for a readonly section we can use
11731 normal __memcpy_chk. */
11732 if (readonly_data_expr (src))
11734 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11735 if (!fn)
11736 return NULL_RTX;
11737 fn = build_call_nofold (fn, 4, dest, src, len, size);
11738 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11739 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11740 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11743 return NULL_RTX;
11747 /* Emit warning if a buffer overflow is detected at compile time. */
11749 static void
11750 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11752 int is_strlen = 0;
11753 tree len, size;
11754 location_t loc = tree_nonartificial_location (exp);
11756 switch (fcode)
11758 case BUILT_IN_STRCPY_CHK:
11759 case BUILT_IN_STPCPY_CHK:
11760 /* For __strcat_chk the warning will be emitted only if overflowing
11761 by at least strlen (dest) + 1 bytes. */
11762 case BUILT_IN_STRCAT_CHK:
11763 len = CALL_EXPR_ARG (exp, 1);
11764 size = CALL_EXPR_ARG (exp, 2);
11765 is_strlen = 1;
11766 break;
11767 case BUILT_IN_STRNCAT_CHK:
11768 case BUILT_IN_STRNCPY_CHK:
11769 len = CALL_EXPR_ARG (exp, 2);
11770 size = CALL_EXPR_ARG (exp, 3);
11771 break;
11772 case BUILT_IN_SNPRINTF_CHK:
11773 case BUILT_IN_VSNPRINTF_CHK:
11774 len = CALL_EXPR_ARG (exp, 1);
11775 size = CALL_EXPR_ARG (exp, 3);
11776 break;
11777 default:
11778 gcc_unreachable ();
11781 if (!len || !size)
11782 return;
11784 if (! host_integerp (size, 1) || integer_all_onesp (size))
11785 return;
11787 if (is_strlen)
11789 len = c_strlen (len, 1);
11790 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11791 return;
11793 else if (fcode == BUILT_IN_STRNCAT_CHK)
11795 tree src = CALL_EXPR_ARG (exp, 1);
11796 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11797 return;
11798 src = c_strlen (src, 1);
11799 if (! src || ! host_integerp (src, 1))
11801 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11802 exp, get_callee_fndecl (exp));
11803 return;
11805 else if (tree_int_cst_lt (src, size))
11806 return;
11808 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11809 return;
11811 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11812 exp, get_callee_fndecl (exp));
11815 /* Emit warning if a buffer overflow is detected at compile time
11816 in __sprintf_chk/__vsprintf_chk calls. */
11818 static void
11819 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11821 tree size, len, fmt;
11822 const char *fmt_str;
11823 int nargs = call_expr_nargs (exp);
11825 /* Verify the required arguments in the original call. */
11827 if (nargs < 4)
11828 return;
11829 size = CALL_EXPR_ARG (exp, 2);
11830 fmt = CALL_EXPR_ARG (exp, 3);
11832 if (! host_integerp (size, 1) || integer_all_onesp (size))
11833 return;
11835 /* Check whether the format is a literal string constant. */
11836 fmt_str = c_getstr (fmt);
11837 if (fmt_str == NULL)
11838 return;
11840 if (!init_target_chars ())
11841 return;
11843 /* If the format doesn't contain % args or %%, we know its size. */
11844 if (strchr (fmt_str, target_percent) == 0)
11845 len = build_int_cstu (size_type_node, strlen (fmt_str));
11846 /* If the format is "%s" and first ... argument is a string literal,
11847 we know it too. */
11848 else if (fcode == BUILT_IN_SPRINTF_CHK
11849 && strcmp (fmt_str, target_percent_s) == 0)
11851 tree arg;
11853 if (nargs < 5)
11854 return;
11855 arg = CALL_EXPR_ARG (exp, 4);
11856 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11857 return;
11859 len = c_strlen (arg, 1);
11860 if (!len || ! host_integerp (len, 1))
11861 return;
11863 else
11864 return;
11866 if (! tree_int_cst_lt (len, size))
11867 warning_at (tree_nonartificial_location (exp),
11868 0, "%Kcall to %D will always overflow destination buffer",
11869 exp, get_callee_fndecl (exp));
11872 /* Emit warning if a free is called with address of a variable. */
11874 static void
11875 maybe_emit_free_warning (tree exp)
11877 tree arg = CALL_EXPR_ARG (exp, 0);
11879 STRIP_NOPS (arg);
11880 if (TREE_CODE (arg) != ADDR_EXPR)
11881 return;
11883 arg = get_base_address (TREE_OPERAND (arg, 0));
11884 if (arg == NULL || INDIRECT_REF_P (arg))
11885 return;
11887 if (SSA_VAR_P (arg))
11888 warning_at (tree_nonartificial_location (exp),
11889 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11890 else
11891 warning_at (tree_nonartificial_location (exp),
11892 0, "%Kattempt to free a non-heap object", exp);
11895 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11896 if possible. */
11898 tree
11899 fold_builtin_object_size (tree ptr, tree ost)
11901 tree ret = NULL_TREE;
11902 int object_size_type;
11904 if (!validate_arg (ptr, POINTER_TYPE)
11905 || !validate_arg (ost, INTEGER_TYPE))
11906 return NULL_TREE;
11908 STRIP_NOPS (ost);
11910 if (TREE_CODE (ost) != INTEGER_CST
11911 || tree_int_cst_sgn (ost) < 0
11912 || compare_tree_int (ost, 3) > 0)
11913 return NULL_TREE;
11915 object_size_type = tree_low_cst (ost, 0);
11917 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11918 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11919 and (size_t) 0 for types 2 and 3. */
11920 if (TREE_SIDE_EFFECTS (ptr))
11921 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11923 if (TREE_CODE (ptr) == ADDR_EXPR)
11924 ret = build_int_cstu (size_type_node,
11925 compute_builtin_object_size (ptr, object_size_type));
11927 else if (TREE_CODE (ptr) == SSA_NAME)
11929 unsigned HOST_WIDE_INT bytes;
11931 /* If object size is not known yet, delay folding until
11932 later. Maybe subsequent passes will help determining
11933 it. */
11934 bytes = compute_builtin_object_size (ptr, object_size_type);
11935 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11936 ? -1 : 0))
11937 ret = build_int_cstu (size_type_node, bytes);
11940 if (ret)
11942 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11943 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11944 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11945 ret = NULL_TREE;
11948 return ret;
11951 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11952 DEST, SRC, LEN, and SIZE are the arguments to the call.
11953 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11954 code of the builtin. If MAXLEN is not NULL, it is maximum length
11955 passed as third argument. */
11957 tree
11958 fold_builtin_memory_chk (location_t loc, tree fndecl,
11959 tree dest, tree src, tree len, tree size,
11960 tree maxlen, bool ignore,
11961 enum built_in_function fcode)
11963 tree fn;
11965 if (!validate_arg (dest, POINTER_TYPE)
11966 || !validate_arg (src,
11967 (fcode == BUILT_IN_MEMSET_CHK
11968 ? INTEGER_TYPE : POINTER_TYPE))
11969 || !validate_arg (len, INTEGER_TYPE)
11970 || !validate_arg (size, INTEGER_TYPE))
11971 return NULL_TREE;
11973 /* If SRC and DEST are the same (and not volatile), return DEST
11974 (resp. DEST+LEN for __mempcpy_chk). */
11975 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11977 if (fcode != BUILT_IN_MEMPCPY_CHK)
11978 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11979 dest, len);
11980 else
11982 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11983 dest, len);
11984 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11988 if (! host_integerp (size, 1))
11989 return NULL_TREE;
11991 if (! integer_all_onesp (size))
11993 if (! host_integerp (len, 1))
11995 /* If LEN is not constant, try MAXLEN too.
11996 For MAXLEN only allow optimizing into non-_ocs function
11997 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11998 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12000 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12002 /* (void) __mempcpy_chk () can be optimized into
12003 (void) __memcpy_chk (). */
12004 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12005 if (!fn)
12006 return NULL_TREE;
12008 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12010 return NULL_TREE;
12013 else
12014 maxlen = len;
12016 if (tree_int_cst_lt (size, maxlen))
12017 return NULL_TREE;
12020 fn = NULL_TREE;
12021 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12022 mem{cpy,pcpy,move,set} is available. */
12023 switch (fcode)
12025 case BUILT_IN_MEMCPY_CHK:
12026 fn = built_in_decls[BUILT_IN_MEMCPY];
12027 break;
12028 case BUILT_IN_MEMPCPY_CHK:
12029 fn = built_in_decls[BUILT_IN_MEMPCPY];
12030 break;
12031 case BUILT_IN_MEMMOVE_CHK:
12032 fn = built_in_decls[BUILT_IN_MEMMOVE];
12033 break;
12034 case BUILT_IN_MEMSET_CHK:
12035 fn = built_in_decls[BUILT_IN_MEMSET];
12036 break;
12037 default:
12038 break;
12041 if (!fn)
12042 return NULL_TREE;
12044 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12047 /* Fold a call to the __st[rp]cpy_chk builtin.
12048 DEST, SRC, and SIZE are the arguments to the call.
12049 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12050 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12051 strings passed as second argument. */
12053 tree
12054 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12055 tree src, tree size,
12056 tree maxlen, bool ignore,
12057 enum built_in_function fcode)
12059 tree len, fn;
12061 if (!validate_arg (dest, POINTER_TYPE)
12062 || !validate_arg (src, POINTER_TYPE)
12063 || !validate_arg (size, INTEGER_TYPE))
12064 return NULL_TREE;
12066 /* If SRC and DEST are the same (and not volatile), return DEST. */
12067 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12068 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12070 if (! host_integerp (size, 1))
12071 return NULL_TREE;
12073 if (! integer_all_onesp (size))
12075 len = c_strlen (src, 1);
12076 if (! len || ! host_integerp (len, 1))
12078 /* If LEN is not constant, try MAXLEN too.
12079 For MAXLEN only allow optimizing into non-_ocs function
12080 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12081 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12083 if (fcode == BUILT_IN_STPCPY_CHK)
12085 if (! ignore)
12086 return NULL_TREE;
12088 /* If return value of __stpcpy_chk is ignored,
12089 optimize into __strcpy_chk. */
12090 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12091 if (!fn)
12092 return NULL_TREE;
12094 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12097 if (! len || TREE_SIDE_EFFECTS (len))
12098 return NULL_TREE;
12100 /* If c_strlen returned something, but not a constant,
12101 transform __strcpy_chk into __memcpy_chk. */
12102 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12103 if (!fn)
12104 return NULL_TREE;
12106 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12107 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12108 build_call_expr_loc (loc, fn, 4,
12109 dest, src, len, size));
12112 else
12113 maxlen = len;
12115 if (! tree_int_cst_lt (maxlen, size))
12116 return NULL_TREE;
12119 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12120 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12121 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12122 if (!fn)
12123 return NULL_TREE;
12125 return build_call_expr_loc (loc, fn, 2, dest, src);
12128 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12129 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12130 length passed as third argument. */
12132 tree
12133 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12134 tree len, tree size, tree maxlen)
12136 tree fn;
12138 if (!validate_arg (dest, POINTER_TYPE)
12139 || !validate_arg (src, POINTER_TYPE)
12140 || !validate_arg (len, INTEGER_TYPE)
12141 || !validate_arg (size, INTEGER_TYPE))
12142 return NULL_TREE;
12144 if (! host_integerp (size, 1))
12145 return NULL_TREE;
12147 if (! integer_all_onesp (size))
12149 if (! host_integerp (len, 1))
12151 /* If LEN is not constant, try MAXLEN too.
12152 For MAXLEN only allow optimizing into non-_ocs function
12153 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12154 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12155 return NULL_TREE;
12157 else
12158 maxlen = len;
12160 if (tree_int_cst_lt (size, maxlen))
12161 return NULL_TREE;
12164 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12165 fn = built_in_decls[BUILT_IN_STRNCPY];
12166 if (!fn)
12167 return NULL_TREE;
12169 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12172 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12173 are the arguments to the call. */
12175 static tree
12176 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12177 tree src, tree size)
12179 tree fn;
12180 const char *p;
12182 if (!validate_arg (dest, POINTER_TYPE)
12183 || !validate_arg (src, POINTER_TYPE)
12184 || !validate_arg (size, INTEGER_TYPE))
12185 return NULL_TREE;
12187 p = c_getstr (src);
12188 /* If the SRC parameter is "", return DEST. */
12189 if (p && *p == '\0')
12190 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12192 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12193 return NULL_TREE;
12195 /* If __builtin_strcat_chk is used, assume strcat is available. */
12196 fn = built_in_decls[BUILT_IN_STRCAT];
12197 if (!fn)
12198 return NULL_TREE;
12200 return build_call_expr_loc (loc, fn, 2, dest, src);
12203 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12204 LEN, and SIZE. */
12206 static tree
12207 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12208 tree dest, tree src, tree len, tree size)
12210 tree fn;
12211 const char *p;
12213 if (!validate_arg (dest, POINTER_TYPE)
12214 || !validate_arg (src, POINTER_TYPE)
12215 || !validate_arg (size, INTEGER_TYPE)
12216 || !validate_arg (size, INTEGER_TYPE))
12217 return NULL_TREE;
12219 p = c_getstr (src);
12220 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12221 if (p && *p == '\0')
12222 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12223 else if (integer_zerop (len))
12224 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12226 if (! host_integerp (size, 1))
12227 return NULL_TREE;
12229 if (! integer_all_onesp (size))
12231 tree src_len = c_strlen (src, 1);
12232 if (src_len
12233 && host_integerp (src_len, 1)
12234 && host_integerp (len, 1)
12235 && ! tree_int_cst_lt (len, src_len))
12237 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12238 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12239 if (!fn)
12240 return NULL_TREE;
12242 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12244 return NULL_TREE;
12247 /* If __builtin_strncat_chk is used, assume strncat is available. */
12248 fn = built_in_decls[BUILT_IN_STRNCAT];
12249 if (!fn)
12250 return NULL_TREE;
12252 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12255 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12256 a normal call should be emitted rather than expanding the function
12257 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12259 static tree
12260 fold_builtin_sprintf_chk (location_t loc, tree exp,
12261 enum built_in_function fcode)
12263 tree dest, size, len, fn, fmt, flag;
12264 const char *fmt_str;
12265 int nargs = call_expr_nargs (exp);
12267 /* Verify the required arguments in the original call. */
12268 if (nargs < 4)
12269 return NULL_TREE;
12270 dest = CALL_EXPR_ARG (exp, 0);
12271 if (!validate_arg (dest, POINTER_TYPE))
12272 return NULL_TREE;
12273 flag = CALL_EXPR_ARG (exp, 1);
12274 if (!validate_arg (flag, INTEGER_TYPE))
12275 return NULL_TREE;
12276 size = CALL_EXPR_ARG (exp, 2);
12277 if (!validate_arg (size, INTEGER_TYPE))
12278 return NULL_TREE;
12279 fmt = CALL_EXPR_ARG (exp, 3);
12280 if (!validate_arg (fmt, POINTER_TYPE))
12281 return NULL_TREE;
12283 if (! host_integerp (size, 1))
12284 return NULL_TREE;
12286 len = NULL_TREE;
12288 if (!init_target_chars ())
12289 return NULL_TREE;
12291 /* Check whether the format is a literal string constant. */
12292 fmt_str = c_getstr (fmt);
12293 if (fmt_str != NULL)
12295 /* If the format doesn't contain % args or %%, we know the size. */
12296 if (strchr (fmt_str, target_percent) == 0)
12298 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12299 len = build_int_cstu (size_type_node, strlen (fmt_str));
12301 /* If the format is "%s" and first ... argument is a string literal,
12302 we know the size too. */
12303 else if (fcode == BUILT_IN_SPRINTF_CHK
12304 && strcmp (fmt_str, target_percent_s) == 0)
12306 tree arg;
12308 if (nargs == 5)
12310 arg = CALL_EXPR_ARG (exp, 4);
12311 if (validate_arg (arg, POINTER_TYPE))
12313 len = c_strlen (arg, 1);
12314 if (! len || ! host_integerp (len, 1))
12315 len = NULL_TREE;
12321 if (! integer_all_onesp (size))
12323 if (! len || ! tree_int_cst_lt (len, size))
12324 return NULL_TREE;
12327 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12328 or if format doesn't contain % chars or is "%s". */
12329 if (! integer_zerop (flag))
12331 if (fmt_str == NULL)
12332 return NULL_TREE;
12333 if (strchr (fmt_str, target_percent) != NULL
12334 && strcmp (fmt_str, target_percent_s))
12335 return NULL_TREE;
12338 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12339 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12340 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12341 if (!fn)
12342 return NULL_TREE;
12344 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12347 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12348 a normal call should be emitted rather than expanding the function
12349 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12350 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12351 passed as second argument. */
12353 tree
12354 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12355 enum built_in_function fcode)
12357 tree dest, size, len, fn, fmt, flag;
12358 const char *fmt_str;
12360 /* Verify the required arguments in the original call. */
12361 if (call_expr_nargs (exp) < 5)
12362 return NULL_TREE;
12363 dest = CALL_EXPR_ARG (exp, 0);
12364 if (!validate_arg (dest, POINTER_TYPE))
12365 return NULL_TREE;
12366 len = CALL_EXPR_ARG (exp, 1);
12367 if (!validate_arg (len, INTEGER_TYPE))
12368 return NULL_TREE;
12369 flag = CALL_EXPR_ARG (exp, 2);
12370 if (!validate_arg (flag, INTEGER_TYPE))
12371 return NULL_TREE;
12372 size = CALL_EXPR_ARG (exp, 3);
12373 if (!validate_arg (size, INTEGER_TYPE))
12374 return NULL_TREE;
12375 fmt = CALL_EXPR_ARG (exp, 4);
12376 if (!validate_arg (fmt, POINTER_TYPE))
12377 return NULL_TREE;
12379 if (! host_integerp (size, 1))
12380 return NULL_TREE;
12382 if (! integer_all_onesp (size))
12384 if (! host_integerp (len, 1))
12386 /* If LEN is not constant, try MAXLEN too.
12387 For MAXLEN only allow optimizing into non-_ocs function
12388 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12389 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12390 return NULL_TREE;
12392 else
12393 maxlen = len;
12395 if (tree_int_cst_lt (size, maxlen))
12396 return NULL_TREE;
12399 if (!init_target_chars ())
12400 return NULL_TREE;
12402 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12403 or if format doesn't contain % chars or is "%s". */
12404 if (! integer_zerop (flag))
12406 fmt_str = c_getstr (fmt);
12407 if (fmt_str == NULL)
12408 return NULL_TREE;
12409 if (strchr (fmt_str, target_percent) != NULL
12410 && strcmp (fmt_str, target_percent_s))
12411 return NULL_TREE;
12414 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12415 available. */
12416 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12417 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12418 if (!fn)
12419 return NULL_TREE;
12421 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12424 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12425 FMT and ARG are the arguments to the call; we don't fold cases with
12426 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12428 Return NULL_TREE if no simplification was possible, otherwise return the
12429 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12430 code of the function to be simplified. */
12432 static tree
12433 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12434 tree arg, bool ignore,
12435 enum built_in_function fcode)
12437 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12438 const char *fmt_str = NULL;
12440 /* If the return value is used, don't do the transformation. */
12441 if (! ignore)
12442 return NULL_TREE;
12444 /* Verify the required arguments in the original call. */
12445 if (!validate_arg (fmt, POINTER_TYPE))
12446 return NULL_TREE;
12448 /* Check whether the format is a literal string constant. */
12449 fmt_str = c_getstr (fmt);
12450 if (fmt_str == NULL)
12451 return NULL_TREE;
12453 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12455 /* If we're using an unlocked function, assume the other
12456 unlocked functions exist explicitly. */
12457 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12458 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12460 else
12462 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12463 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12466 if (!init_target_chars ())
12467 return NULL_TREE;
12469 if (strcmp (fmt_str, target_percent_s) == 0
12470 || strchr (fmt_str, target_percent) == NULL)
12472 const char *str;
12474 if (strcmp (fmt_str, target_percent_s) == 0)
12476 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12477 return NULL_TREE;
12479 if (!arg || !validate_arg (arg, POINTER_TYPE))
12480 return NULL_TREE;
12482 str = c_getstr (arg);
12483 if (str == NULL)
12484 return NULL_TREE;
12486 else
12488 /* The format specifier doesn't contain any '%' characters. */
12489 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12490 && arg)
12491 return NULL_TREE;
12492 str = fmt_str;
12495 /* If the string was "", printf does nothing. */
12496 if (str[0] == '\0')
12497 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12499 /* If the string has length of 1, call putchar. */
12500 if (str[1] == '\0')
12502 /* Given printf("c"), (where c is any one character,)
12503 convert "c"[0] to an int and pass that to the replacement
12504 function. */
12505 newarg = build_int_cst (NULL_TREE, str[0]);
12506 if (fn_putchar)
12507 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12509 else
12511 /* If the string was "string\n", call puts("string"). */
12512 size_t len = strlen (str);
12513 if ((unsigned char)str[len - 1] == target_newline)
12515 /* Create a NUL-terminated string that's one char shorter
12516 than the original, stripping off the trailing '\n'. */
12517 char *newstr = XALLOCAVEC (char, len);
12518 memcpy (newstr, str, len - 1);
12519 newstr[len - 1] = 0;
12521 newarg = build_string_literal (len, newstr);
12522 if (fn_puts)
12523 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12525 else
12526 /* We'd like to arrange to call fputs(string,stdout) here,
12527 but we need stdout and don't have a way to get it yet. */
12528 return NULL_TREE;
12532 /* The other optimizations can be done only on the non-va_list variants. */
12533 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12534 return NULL_TREE;
12536 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12537 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12539 if (!arg || !validate_arg (arg, POINTER_TYPE))
12540 return NULL_TREE;
12541 if (fn_puts)
12542 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12545 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12546 else if (strcmp (fmt_str, target_percent_c) == 0)
12548 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12549 return NULL_TREE;
12550 if (fn_putchar)
12551 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12554 if (!call)
12555 return NULL_TREE;
12557 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12560 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12561 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12562 more than 3 arguments, and ARG may be null in the 2-argument case.
12564 Return NULL_TREE if no simplification was possible, otherwise return the
12565 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12566 code of the function to be simplified. */
12568 static tree
12569 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12570 tree fmt, tree arg, bool ignore,
12571 enum built_in_function fcode)
12573 tree fn_fputc, fn_fputs, call = NULL_TREE;
12574 const char *fmt_str = NULL;
12576 /* If the return value is used, don't do the transformation. */
12577 if (! ignore)
12578 return NULL_TREE;
12580 /* Verify the required arguments in the original call. */
12581 if (!validate_arg (fp, POINTER_TYPE))
12582 return NULL_TREE;
12583 if (!validate_arg (fmt, POINTER_TYPE))
12584 return NULL_TREE;
12586 /* Check whether the format is a literal string constant. */
12587 fmt_str = c_getstr (fmt);
12588 if (fmt_str == NULL)
12589 return NULL_TREE;
12591 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12593 /* If we're using an unlocked function, assume the other
12594 unlocked functions exist explicitly. */
12595 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12596 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12598 else
12600 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12601 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12604 if (!init_target_chars ())
12605 return NULL_TREE;
12607 /* If the format doesn't contain % args or %%, use strcpy. */
12608 if (strchr (fmt_str, target_percent) == NULL)
12610 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12611 && arg)
12612 return NULL_TREE;
12614 /* If the format specifier was "", fprintf does nothing. */
12615 if (fmt_str[0] == '\0')
12617 /* If FP has side-effects, just wait until gimplification is
12618 done. */
12619 if (TREE_SIDE_EFFECTS (fp))
12620 return NULL_TREE;
12622 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12625 /* When "string" doesn't contain %, replace all cases of
12626 fprintf (fp, string) with fputs (string, fp). The fputs
12627 builtin will take care of special cases like length == 1. */
12628 if (fn_fputs)
12629 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12632 /* The other optimizations can be done only on the non-va_list variants. */
12633 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12634 return NULL_TREE;
12636 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12637 else if (strcmp (fmt_str, target_percent_s) == 0)
12639 if (!arg || !validate_arg (arg, POINTER_TYPE))
12640 return NULL_TREE;
12641 if (fn_fputs)
12642 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12645 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12646 else if (strcmp (fmt_str, target_percent_c) == 0)
12648 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12649 return NULL_TREE;
12650 if (fn_fputc)
12651 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12654 if (!call)
12655 return NULL_TREE;
12656 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12659 /* Initialize format string characters in the target charset. */
12661 static bool
12662 init_target_chars (void)
12664 static bool init;
12665 if (!init)
12667 target_newline = lang_hooks.to_target_charset ('\n');
12668 target_percent = lang_hooks.to_target_charset ('%');
12669 target_c = lang_hooks.to_target_charset ('c');
12670 target_s = lang_hooks.to_target_charset ('s');
12671 if (target_newline == 0 || target_percent == 0 || target_c == 0
12672 || target_s == 0)
12673 return false;
12675 target_percent_c[0] = target_percent;
12676 target_percent_c[1] = target_c;
12677 target_percent_c[2] = '\0';
12679 target_percent_s[0] = target_percent;
12680 target_percent_s[1] = target_s;
12681 target_percent_s[2] = '\0';
12683 target_percent_s_newline[0] = target_percent;
12684 target_percent_s_newline[1] = target_s;
12685 target_percent_s_newline[2] = target_newline;
12686 target_percent_s_newline[3] = '\0';
12688 init = true;
12690 return true;
12693 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12694 and no overflow/underflow occurred. INEXACT is true if M was not
12695 exactly calculated. TYPE is the tree type for the result. This
12696 function assumes that you cleared the MPFR flags and then
12697 calculated M to see if anything subsequently set a flag prior to
12698 entering this function. Return NULL_TREE if any checks fail. */
12700 static tree
12701 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12703 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12704 overflow/underflow occurred. If -frounding-math, proceed iff the
12705 result of calling FUNC was exact. */
12706 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12707 && (!flag_rounding_math || !inexact))
12709 REAL_VALUE_TYPE rr;
12711 real_from_mpfr (&rr, m, type, GMP_RNDN);
12712 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12713 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12714 but the mpft_t is not, then we underflowed in the
12715 conversion. */
12716 if (real_isfinite (&rr)
12717 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12719 REAL_VALUE_TYPE rmode;
12721 real_convert (&rmode, TYPE_MODE (type), &rr);
12722 /* Proceed iff the specified mode can hold the value. */
12723 if (real_identical (&rmode, &rr))
12724 return build_real (type, rmode);
12727 return NULL_TREE;
12730 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12731 number and no overflow/underflow occurred. INEXACT is true if M
12732 was not exactly calculated. TYPE is the tree type for the result.
12733 This function assumes that you cleared the MPFR flags and then
12734 calculated M to see if anything subsequently set a flag prior to
12735 entering this function. Return NULL_TREE if any checks fail, if
12736 FORCE_CONVERT is true, then bypass the checks. */
12738 static tree
12739 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12741 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12742 overflow/underflow occurred. If -frounding-math, proceed iff the
12743 result of calling FUNC was exact. */
12744 if (force_convert
12745 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12746 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12747 && (!flag_rounding_math || !inexact)))
12749 REAL_VALUE_TYPE re, im;
12751 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12752 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12753 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12754 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12755 but the mpft_t is not, then we underflowed in the
12756 conversion. */
12757 if (force_convert
12758 || (real_isfinite (&re) && real_isfinite (&im)
12759 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12760 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12762 REAL_VALUE_TYPE re_mode, im_mode;
12764 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12765 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12766 /* Proceed iff the specified mode can hold the value. */
12767 if (force_convert
12768 || (real_identical (&re_mode, &re)
12769 && real_identical (&im_mode, &im)))
12770 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12771 build_real (TREE_TYPE (type), im_mode));
12774 return NULL_TREE;
12777 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12778 FUNC on it and return the resulting value as a tree with type TYPE.
12779 If MIN and/or MAX are not NULL, then the supplied ARG must be
12780 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12781 acceptable values, otherwise they are not. The mpfr precision is
12782 set to the precision of TYPE. We assume that function FUNC returns
12783 zero if the result could be calculated exactly within the requested
12784 precision. */
12786 static tree
12787 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12788 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12789 bool inclusive)
12791 tree result = NULL_TREE;
12793 STRIP_NOPS (arg);
12795 /* To proceed, MPFR must exactly represent the target floating point
12796 format, which only happens when the target base equals two. */
12797 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12798 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12800 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12802 if (real_isfinite (ra)
12803 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12804 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12806 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12807 const int prec = fmt->p;
12808 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12809 int inexact;
12810 mpfr_t m;
12812 mpfr_init2 (m, prec);
12813 mpfr_from_real (m, ra, GMP_RNDN);
12814 mpfr_clear_flags ();
12815 inexact = func (m, m, rnd);
12816 result = do_mpfr_ckconv (m, type, inexact);
12817 mpfr_clear (m);
12821 return result;
12824 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12825 FUNC on it and return the resulting value as a tree with type TYPE.
12826 The mpfr precision is set to the precision of TYPE. We assume that
12827 function FUNC returns zero if the result could be calculated
12828 exactly within the requested precision. */
12830 static tree
12831 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12832 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12834 tree result = NULL_TREE;
12836 STRIP_NOPS (arg1);
12837 STRIP_NOPS (arg2);
12839 /* To proceed, MPFR must exactly represent the target floating point
12840 format, which only happens when the target base equals two. */
12841 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12842 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12843 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12845 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12846 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12848 if (real_isfinite (ra1) && real_isfinite (ra2))
12850 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12851 const int prec = fmt->p;
12852 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12853 int inexact;
12854 mpfr_t m1, m2;
12856 mpfr_inits2 (prec, m1, m2, NULL);
12857 mpfr_from_real (m1, ra1, GMP_RNDN);
12858 mpfr_from_real (m2, ra2, GMP_RNDN);
12859 mpfr_clear_flags ();
12860 inexact = func (m1, m1, m2, rnd);
12861 result = do_mpfr_ckconv (m1, type, inexact);
12862 mpfr_clears (m1, m2, NULL);
12866 return result;
12869 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12870 FUNC on it and return the resulting value as a tree with type TYPE.
12871 The mpfr precision is set to the precision of TYPE. We assume that
12872 function FUNC returns zero if the result could be calculated
12873 exactly within the requested precision. */
12875 static tree
12876 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12877 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12879 tree result = NULL_TREE;
12881 STRIP_NOPS (arg1);
12882 STRIP_NOPS (arg2);
12883 STRIP_NOPS (arg3);
12885 /* To proceed, MPFR must exactly represent the target floating point
12886 format, which only happens when the target base equals two. */
12887 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12888 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12889 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12890 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12892 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12893 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12894 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12896 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12898 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12899 const int prec = fmt->p;
12900 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12901 int inexact;
12902 mpfr_t m1, m2, m3;
12904 mpfr_inits2 (prec, m1, m2, m3, NULL);
12905 mpfr_from_real (m1, ra1, GMP_RNDN);
12906 mpfr_from_real (m2, ra2, GMP_RNDN);
12907 mpfr_from_real (m3, ra3, GMP_RNDN);
12908 mpfr_clear_flags ();
12909 inexact = func (m1, m1, m2, m3, rnd);
12910 result = do_mpfr_ckconv (m1, type, inexact);
12911 mpfr_clears (m1, m2, m3, NULL);
12915 return result;
12918 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12919 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12920 If ARG_SINP and ARG_COSP are NULL then the result is returned
12921 as a complex value.
12922 The type is taken from the type of ARG and is used for setting the
12923 precision of the calculation and results. */
12925 static tree
12926 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12928 tree const type = TREE_TYPE (arg);
12929 tree result = NULL_TREE;
12931 STRIP_NOPS (arg);
12933 /* To proceed, MPFR must exactly represent the target floating point
12934 format, which only happens when the target base equals two. */
12935 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12936 && TREE_CODE (arg) == REAL_CST
12937 && !TREE_OVERFLOW (arg))
12939 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12941 if (real_isfinite (ra))
12943 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12944 const int prec = fmt->p;
12945 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12946 tree result_s, result_c;
12947 int inexact;
12948 mpfr_t m, ms, mc;
12950 mpfr_inits2 (prec, m, ms, mc, NULL);
12951 mpfr_from_real (m, ra, GMP_RNDN);
12952 mpfr_clear_flags ();
12953 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12954 result_s = do_mpfr_ckconv (ms, type, inexact);
12955 result_c = do_mpfr_ckconv (mc, type, inexact);
12956 mpfr_clears (m, ms, mc, NULL);
12957 if (result_s && result_c)
12959 /* If we are to return in a complex value do so. */
12960 if (!arg_sinp && !arg_cosp)
12961 return build_complex (build_complex_type (type),
12962 result_c, result_s);
12964 /* Dereference the sin/cos pointer arguments. */
12965 arg_sinp = build_fold_indirect_ref (arg_sinp);
12966 arg_cosp = build_fold_indirect_ref (arg_cosp);
12967 /* Proceed if valid pointer type were passed in. */
12968 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12969 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12971 /* Set the values. */
12972 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12973 result_s);
12974 TREE_SIDE_EFFECTS (result_s) = 1;
12975 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12976 result_c);
12977 TREE_SIDE_EFFECTS (result_c) = 1;
12978 /* Combine the assignments into a compound expr. */
12979 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12980 result_s, result_c));
12985 return result;
12988 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12989 two-argument mpfr order N Bessel function FUNC on them and return
12990 the resulting value as a tree with type TYPE. The mpfr precision
12991 is set to the precision of TYPE. We assume that function FUNC
12992 returns zero if the result could be calculated exactly within the
12993 requested precision. */
12994 static tree
12995 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12996 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12997 const REAL_VALUE_TYPE *min, bool inclusive)
12999 tree result = NULL_TREE;
13001 STRIP_NOPS (arg1);
13002 STRIP_NOPS (arg2);
13004 /* To proceed, MPFR must exactly represent the target floating point
13005 format, which only happens when the target base equals two. */
13006 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13007 && host_integerp (arg1, 0)
13008 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13010 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13011 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13013 if (n == (long)n
13014 && real_isfinite (ra)
13015 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13017 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13018 const int prec = fmt->p;
13019 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13020 int inexact;
13021 mpfr_t m;
13023 mpfr_init2 (m, prec);
13024 mpfr_from_real (m, ra, GMP_RNDN);
13025 mpfr_clear_flags ();
13026 inexact = func (m, n, m, rnd);
13027 result = do_mpfr_ckconv (m, type, inexact);
13028 mpfr_clear (m);
13032 return result;
13035 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13036 the pointer *(ARG_QUO) and return the result. The type is taken
13037 from the type of ARG0 and is used for setting the precision of the
13038 calculation and results. */
13040 static tree
13041 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13043 tree const type = TREE_TYPE (arg0);
13044 tree result = NULL_TREE;
13046 STRIP_NOPS (arg0);
13047 STRIP_NOPS (arg1);
13049 /* To proceed, MPFR must exactly represent the target floating point
13050 format, which only happens when the target base equals two. */
13051 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13052 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13053 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13055 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13056 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13058 if (real_isfinite (ra0) && real_isfinite (ra1))
13060 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13061 const int prec = fmt->p;
13062 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13063 tree result_rem;
13064 long integer_quo;
13065 mpfr_t m0, m1;
13067 mpfr_inits2 (prec, m0, m1, NULL);
13068 mpfr_from_real (m0, ra0, GMP_RNDN);
13069 mpfr_from_real (m1, ra1, GMP_RNDN);
13070 mpfr_clear_flags ();
13071 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13072 /* Remquo is independent of the rounding mode, so pass
13073 inexact=0 to do_mpfr_ckconv(). */
13074 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13075 mpfr_clears (m0, m1, NULL);
13076 if (result_rem)
13078 /* MPFR calculates quo in the host's long so it may
13079 return more bits in quo than the target int can hold
13080 if sizeof(host long) > sizeof(target int). This can
13081 happen even for native compilers in LP64 mode. In
13082 these cases, modulo the quo value with the largest
13083 number that the target int can hold while leaving one
13084 bit for the sign. */
13085 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13086 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13088 /* Dereference the quo pointer argument. */
13089 arg_quo = build_fold_indirect_ref (arg_quo);
13090 /* Proceed iff a valid pointer type was passed in. */
13091 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13093 /* Set the value. */
13094 tree result_quo = fold_build2 (MODIFY_EXPR,
13095 TREE_TYPE (arg_quo), arg_quo,
13096 build_int_cst (NULL, integer_quo));
13097 TREE_SIDE_EFFECTS (result_quo) = 1;
13098 /* Combine the quo assignment with the rem. */
13099 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13100 result_quo, result_rem));
13105 return result;
13108 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13109 resulting value as a tree with type TYPE. The mpfr precision is
13110 set to the precision of TYPE. We assume that this mpfr function
13111 returns zero if the result could be calculated exactly within the
13112 requested precision. In addition, the integer pointer represented
13113 by ARG_SG will be dereferenced and set to the appropriate signgam
13114 (-1,1) value. */
13116 static tree
13117 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13119 tree result = NULL_TREE;
13121 STRIP_NOPS (arg);
13123 /* To proceed, MPFR must exactly represent the target floating point
13124 format, which only happens when the target base equals two. Also
13125 verify ARG is a constant and that ARG_SG is an int pointer. */
13126 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13127 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13128 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13129 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13131 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13133 /* In addition to NaN and Inf, the argument cannot be zero or a
13134 negative integer. */
13135 if (real_isfinite (ra)
13136 && ra->cl != rvc_zero
13137 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13139 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13140 const int prec = fmt->p;
13141 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13142 int inexact, sg;
13143 mpfr_t m;
13144 tree result_lg;
13146 mpfr_init2 (m, prec);
13147 mpfr_from_real (m, ra, GMP_RNDN);
13148 mpfr_clear_flags ();
13149 inexact = mpfr_lgamma (m, &sg, m, rnd);
13150 result_lg = do_mpfr_ckconv (m, type, inexact);
13151 mpfr_clear (m);
13152 if (result_lg)
13154 tree result_sg;
13156 /* Dereference the arg_sg pointer argument. */
13157 arg_sg = build_fold_indirect_ref (arg_sg);
13158 /* Assign the signgam value into *arg_sg. */
13159 result_sg = fold_build2 (MODIFY_EXPR,
13160 TREE_TYPE (arg_sg), arg_sg,
13161 build_int_cst (NULL, sg));
13162 TREE_SIDE_EFFECTS (result_sg) = 1;
13163 /* Combine the signgam assignment with the lgamma result. */
13164 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13165 result_sg, result_lg));
13170 return result;
13173 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13174 function FUNC on it and return the resulting value as a tree with
13175 type TYPE. The mpfr precision is set to the precision of TYPE. We
13176 assume that function FUNC returns zero if the result could be
13177 calculated exactly within the requested precision. */
13179 static tree
13180 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13182 tree result = NULL_TREE;
13184 STRIP_NOPS (arg);
13186 /* To proceed, MPFR must exactly represent the target floating point
13187 format, which only happens when the target base equals two. */
13188 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13189 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13190 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13192 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13193 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13195 if (real_isfinite (re) && real_isfinite (im))
13197 const struct real_format *const fmt =
13198 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13199 const int prec = fmt->p;
13200 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13201 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13202 int inexact;
13203 mpc_t m;
13205 mpc_init2 (m, prec);
13206 mpfr_from_real (mpc_realref(m), re, rnd);
13207 mpfr_from_real (mpc_imagref(m), im, rnd);
13208 mpfr_clear_flags ();
13209 inexact = func (m, m, crnd);
13210 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13211 mpc_clear (m);
13215 return result;
13218 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13219 mpc function FUNC on it and return the resulting value as a tree
13220 with type TYPE. The mpfr precision is set to the precision of
13221 TYPE. We assume that function FUNC returns zero if the result
13222 could be calculated exactly within the requested precision. If
13223 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13224 in the arguments and/or results. */
13226 tree
13227 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13228 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13230 tree result = NULL_TREE;
13232 STRIP_NOPS (arg0);
13233 STRIP_NOPS (arg1);
13235 /* To proceed, MPFR must exactly represent the target floating point
13236 format, which only happens when the target base equals two. */
13237 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13238 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13239 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13240 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13241 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13243 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13244 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13245 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13246 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13248 if (do_nonfinite
13249 || (real_isfinite (re0) && real_isfinite (im0)
13250 && real_isfinite (re1) && real_isfinite (im1)))
13252 const struct real_format *const fmt =
13253 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13254 const int prec = fmt->p;
13255 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13256 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13257 int inexact;
13258 mpc_t m0, m1;
13260 mpc_init2 (m0, prec);
13261 mpc_init2 (m1, prec);
13262 mpfr_from_real (mpc_realref(m0), re0, rnd);
13263 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13264 mpfr_from_real (mpc_realref(m1), re1, rnd);
13265 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13266 mpfr_clear_flags ();
13267 inexact = func (m0, m0, m1, crnd);
13268 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13269 mpc_clear (m0);
13270 mpc_clear (m1);
13274 return result;
/* FIXME tuples.
   The functions below provide an alternate interface for folding
   builtin function calls presented as GIMPLE_CALL statements rather
   than as CALL_EXPRs.  The folded result is still expressed as a
   tree.  There is too much code duplication in the handling of
   varargs functions, and a more intrusive re-factoring would permit
   better sharing of code between the tree and statement-based
   versions of these functions.  */
13286 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13287 along with N new arguments specified as the "..." parameters. SKIP
13288 is the number of arguments in STMT to be omitted. This function is used
13289 to do varargs-to-varargs transformations. */
13291 static tree
13292 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13294 int oldnargs = gimple_call_num_args (stmt);
13295 int nargs = oldnargs - skip + n;
13296 tree fntype = TREE_TYPE (fndecl);
13297 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13298 tree *buffer;
13299 int i, j;
13300 va_list ap;
13301 location_t loc = gimple_location (stmt);
13303 buffer = XALLOCAVEC (tree, nargs);
13304 va_start (ap, n);
13305 for (i = 0; i < n; i++)
13306 buffer[i] = va_arg (ap, tree);
13307 va_end (ap);
13308 for (j = skip; j < oldnargs; j++, i++)
13309 buffer[i] = gimple_call_arg (stmt, j);
13311 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13314 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13315 a normal call should be emitted rather than expanding the function
13316 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13318 static tree
13319 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13321 tree dest, size, len, fn, fmt, flag;
13322 const char *fmt_str;
13323 int nargs = gimple_call_num_args (stmt);
13325 /* Verify the required arguments in the original call. */
13326 if (nargs < 4)
13327 return NULL_TREE;
13328 dest = gimple_call_arg (stmt, 0);
13329 if (!validate_arg (dest, POINTER_TYPE))
13330 return NULL_TREE;
13331 flag = gimple_call_arg (stmt, 1);
13332 if (!validate_arg (flag, INTEGER_TYPE))
13333 return NULL_TREE;
13334 size = gimple_call_arg (stmt, 2);
13335 if (!validate_arg (size, INTEGER_TYPE))
13336 return NULL_TREE;
13337 fmt = gimple_call_arg (stmt, 3);
13338 if (!validate_arg (fmt, POINTER_TYPE))
13339 return NULL_TREE;
13341 if (! host_integerp (size, 1))
13342 return NULL_TREE;
13344 len = NULL_TREE;
13346 if (!init_target_chars ())
13347 return NULL_TREE;
13349 /* Check whether the format is a literal string constant. */
13350 fmt_str = c_getstr (fmt);
13351 if (fmt_str != NULL)
13353 /* If the format doesn't contain % args or %%, we know the size. */
13354 if (strchr (fmt_str, target_percent) == 0)
13356 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13357 len = build_int_cstu (size_type_node, strlen (fmt_str));
13359 /* If the format is "%s" and first ... argument is a string literal,
13360 we know the size too. */
13361 else if (fcode == BUILT_IN_SPRINTF_CHK
13362 && strcmp (fmt_str, target_percent_s) == 0)
13364 tree arg;
13366 if (nargs == 5)
13368 arg = gimple_call_arg (stmt, 4);
13369 if (validate_arg (arg, POINTER_TYPE))
13371 len = c_strlen (arg, 1);
13372 if (! len || ! host_integerp (len, 1))
13373 len = NULL_TREE;
13379 if (! integer_all_onesp (size))
13381 if (! len || ! tree_int_cst_lt (len, size))
13382 return NULL_TREE;
13385 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13386 or if format doesn't contain % chars or is "%s". */
13387 if (! integer_zerop (flag))
13389 if (fmt_str == NULL)
13390 return NULL_TREE;
13391 if (strchr (fmt_str, target_percent) != NULL
13392 && strcmp (fmt_str, target_percent_s))
13393 return NULL_TREE;
13396 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13397 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13398 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13399 if (!fn)
13400 return NULL_TREE;
13402 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13405 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13406 a normal call should be emitted rather than expanding the function
13407 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13408 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13409 passed as second argument. */
13411 tree
13412 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13413 enum built_in_function fcode)
13415 tree dest, size, len, fn, fmt, flag;
13416 const char *fmt_str;
13418 /* Verify the required arguments in the original call. */
13419 if (gimple_call_num_args (stmt) < 5)
13420 return NULL_TREE;
13421 dest = gimple_call_arg (stmt, 0);
13422 if (!validate_arg (dest, POINTER_TYPE))
13423 return NULL_TREE;
13424 len = gimple_call_arg (stmt, 1);
13425 if (!validate_arg (len, INTEGER_TYPE))
13426 return NULL_TREE;
13427 flag = gimple_call_arg (stmt, 2);
13428 if (!validate_arg (flag, INTEGER_TYPE))
13429 return NULL_TREE;
13430 size = gimple_call_arg (stmt, 3);
13431 if (!validate_arg (size, INTEGER_TYPE))
13432 return NULL_TREE;
13433 fmt = gimple_call_arg (stmt, 4);
13434 if (!validate_arg (fmt, POINTER_TYPE))
13435 return NULL_TREE;
13437 if (! host_integerp (size, 1))
13438 return NULL_TREE;
13440 if (! integer_all_onesp (size))
13442 if (! host_integerp (len, 1))
13444 /* If LEN is not constant, try MAXLEN too.
13445 For MAXLEN only allow optimizing into non-_ocs function
13446 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13447 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13448 return NULL_TREE;
13450 else
13451 maxlen = len;
13453 if (tree_int_cst_lt (size, maxlen))
13454 return NULL_TREE;
13457 if (!init_target_chars ())
13458 return NULL_TREE;
13460 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13461 or if format doesn't contain % chars or is "%s". */
13462 if (! integer_zerop (flag))
13464 fmt_str = c_getstr (fmt);
13465 if (fmt_str == NULL)
13466 return NULL_TREE;
13467 if (strchr (fmt_str, target_percent) != NULL
13468 && strcmp (fmt_str, target_percent_s))
13469 return NULL_TREE;
13472 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13473 available. */
13474 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13475 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13476 if (!fn)
13477 return NULL_TREE;
13479 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13482 /* Builtins with folding operations that operate on "..." arguments
13483 need special handling; we need to store the arguments in a convenient
13484 data structure before attempting any folding. Fortunately there are
13485 only a few builtins that fall into this category. FNDECL is the
13486 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13487 result of the function call is ignored. */
13489 static tree
13490 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13491 bool ignore ATTRIBUTE_UNUSED)
13493 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13494 tree ret = NULL_TREE;
13496 switch (fcode)
13498 case BUILT_IN_SPRINTF_CHK:
13499 case BUILT_IN_VSPRINTF_CHK:
13500 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13501 break;
13503 case BUILT_IN_SNPRINTF_CHK:
13504 case BUILT_IN_VSNPRINTF_CHK:
13505 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13507 default:
13508 break;
13510 if (ret)
13512 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13513 TREE_NO_WARNING (ret) = 1;
13514 return ret;
13516 return NULL_TREE;
13519 /* A wrapper function for builtin folding that prevents warnings for
13520 "statement without effect" and the like, caused by removing the
13521 call node earlier than the warning is generated. */
13523 tree
13524 fold_call_stmt (gimple stmt, bool ignore)
13526 tree ret = NULL_TREE;
13527 tree fndecl = gimple_call_fndecl (stmt);
13528 location_t loc = gimple_location (stmt);
13529 if (fndecl
13530 && TREE_CODE (fndecl) == FUNCTION_DECL
13531 && DECL_BUILT_IN (fndecl)
13532 && !gimple_call_va_arg_pack_p (stmt))
13534 int nargs = gimple_call_num_args (stmt);
13536 if (avoid_folding_inline_builtin (fndecl))
13537 return NULL_TREE;
13538 /* FIXME: Don't use a list in this interface. */
13539 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13541 tree arglist = NULL_TREE;
13542 int i;
13543 for (i = nargs - 1; i >= 0; i--)
13544 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13545 return targetm.fold_builtin (fndecl, arglist, ignore);
13547 else
13549 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13551 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13552 int i;
13553 for (i = 0; i < nargs; i++)
13554 args[i] = gimple_call_arg (stmt, i);
13555 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13557 if (!ret)
13558 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13559 if (ret)
13561 /* Propagate location information from original call to
13562 expansion of builtin. Otherwise things like
13563 maybe_emit_chk_warning, that operate on the expansion
13564 of a builtin, will use the wrong location information. */
13565 if (gimple_has_location (stmt))
13567 tree realret = ret;
13568 if (TREE_CODE (ret) == NOP_EXPR)
13569 realret = TREE_OPERAND (ret, 0);
13570 if (CAN_HAVE_LOCATION_P (realret)
13571 && !EXPR_HAS_LOCATION (realret))
13572 SET_EXPR_LOCATION (realret, loc);
13573 return realret;
13575 return ret;
13579 return NULL_TREE;
13582 /* Look up the function in built_in_decls that corresponds to DECL
13583 and set ASMSPEC as its user assembler name. DECL must be a
13584 function decl that declares a builtin. */
13586 void
13587 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13589 tree builtin;
13590 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13591 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13592 && asmspec != 0);
13594 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13595 set_user_assembler_name (builtin, asmspec);
13596 switch (DECL_FUNCTION_CODE (decl))
13598 case BUILT_IN_MEMCPY:
13599 init_block_move_fn (asmspec);
13600 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13601 break;
13602 case BUILT_IN_MEMSET:
13603 init_block_clear_fn (asmspec);
13604 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13605 break;
13606 case BUILT_IN_MEMMOVE:
13607 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13608 break;
13609 case BUILT_IN_MEMCMP:
13610 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13611 break;
13612 case BUILT_IN_ABORT:
13613 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13614 break;
13615 case BUILT_IN_FFS:
13616 if (INT_TYPE_SIZE < BITS_PER_WORD)
13618 set_user_assembler_libfunc ("ffs", asmspec);
13619 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13620 MODE_INT, 0), "ffs");
13622 break;
13623 default:
13624 break;