/* Imported from the GCC repository: gcc/builtins.c
   (snapshot dated 2010-02-22; ChangeLog entry by Paul Thomas <pault@gcc.gnu.org>;
   blob 1e089ef99c40e392aeeb1d95dc5f42e51f7e83b4).  */
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
/* Fallback definitions for target macros a port may leave undefined.
   SLOW_UNALIGNED_ACCESS defaults to the target's strict-alignment flag;
   PAD_VARARGS_DOWN defaults to padding direction implied by endianness.  */
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Early forward declaration; the definition lives with the other
   MPC/MPFR folding helpers later in this file.  */
static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringify each DEF_BUILTIN entry from builtins.def to build the
   parallel table of builtin enumerator names.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
/* Forward declarations for the local helpers defined below, grouped
   roughly by purpose: constant-string access, the apply/return-address
   machinery, RTL expanders for individual builtins, the fold_builtin_*
   constant folders, the *_chk fortified variants, and the MPFR-backed
   math folders.  */
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (location_t, tree, tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strcat (location_t, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);

/* Object-size checking (__builtin___*_chk) support.  */
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

/* Target-charset images of a few host characters, filled in lazily by
   init_target_chars and used when folding printf-family builtins.  */
static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];

/* MPFR-backed compile-time evaluation of math builtins.  */
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_.  */

bool
is_builtin_name (const char *name)
{
  /* The set of reserved prefixes that mark a builtin's "internal" name.  */
  static const char *const prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i], strlen (prefixes[i])) == 0)
      return true;
  return false;
}
241 /* Return true if DECL is a function symbol representing a built-in. */
243 bool
244 is_builtin_fn (tree decl)
246 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
250 /* Return true if NODE should be considered for inline expansion regardless
251 of the optimization level. This means whenever a function is invoked with
252 its "internal" name, which normally contains the prefix "__builtin". */
254 static bool
255 called_as_built_in (tree node)
257 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
258 we want the name used to call the function, not the name it
259 will have. */
260 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
261 return is_builtin_name (name);
/* Return the alignment in bits of EXP, an object.
   Don't return more than MAX_ALIGN no matter what, ALIGN is the initial
   guessed alignment e.g. from type alignment.  */

unsigned int
get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
{
  /* INNER tracks the alignment guaranteed by the access path (bit
     position and variable offsets); it only ever decreases from
     MAX_ALIGN.  */
  unsigned int inner;

  inner = max_align;
  if (handled_component_p (exp))
    {
      HOST_WIDE_INT bitsize, bitpos;
      tree offset;
      enum machine_mode mode;
      int unsignedp, volatilep;

      /* Strip component references down to the base object, collecting
	 the constant bit position and any variable byte offset.  */
      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				 &mode, &unsignedp, &volatilep, true);
      /* bitpos & -bitpos isolates the lowest set bit: the largest
	 power-of-two alignment the bit position still guarantees.  */
      if (bitpos)
	inner = MIN (inner, (unsigned) (bitpos & -bitpos));
      /* Walk a (possibly PLUS_EXPR-chained) variable offset, reducing
	 INNER by what each term can guarantee.  */
      while (offset)
	{
	  tree next_offset;

	  if (TREE_CODE (offset) == PLUS_EXPR)
	    {
	      next_offset = TREE_OPERAND (offset, 0);
	      offset = TREE_OPERAND (offset, 1);
	    }
	  else
	    next_offset = NULL;
	  if (host_integerp (offset, 1))
	    {
	      /* Any overflow in calculating offset_bits won't change
		 the alignment.  */
	      unsigned offset_bits
		= ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);

	      if (offset_bits)
		inner = MIN (inner, (offset_bits & -offset_bits));
	    }
	  else if (TREE_CODE (offset) == MULT_EXPR
		   && host_integerp (TREE_OPERAND (offset, 1), 1))
	    {
	      /* Any overflow in calculating offset_factor won't change
		 the alignment.  A variable times a constant factor is
		 at least as aligned as the factor.  */
	      unsigned offset_factor
		= ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
		   * BITS_PER_UNIT);

	      if (offset_factor)
		inner = MIN (inner, (offset_factor & -offset_factor));
	    }
	  else
	    {
	      /* Unknown offset shape: only byte alignment is certain.  */
	      inner = MIN (inner, BITS_PER_UNIT);
	      break;
	    }
	  offset = next_offset;
	}
    }
  if (TREE_CODE (exp) == CONST_DECL)
    exp = DECL_INITIAL (exp);
  if (DECL_P (exp)
      && TREE_CODE (exp) != LABEL_DECL)
    align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
  else if (CONSTANT_CLASS_P (exp))
    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
#endif
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
	   || TREE_CODE (exp) == INDIRECT_REF)
    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
  else
    align = MIN (align, inner);
  return MIN (align, max_align);
}
343 /* Returns true iff we can trust that alignment information has been
344 calculated properly. */
346 bool
347 can_trust_pointer_alignment (void)
349 /* We rely on TER to compute accurate alignment information. */
350 return (optimize && flag_tree_ter);
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (!can_trust_pointer_alignment ())
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Start from the alignment of the pointed-to type, capped at
     MAX_ALIGN.  */
  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  /* Peel conversions and pointer arithmetic until we reach something
     we can draw a conclusion from.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	CASE_CONVERT:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case POINTER_PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  /* Halve MAX_ALIGN until the constant addend is a multiple of
	     it, i.e. until the addend cannot break that alignment.  */
	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);

	default:
	  return align;
	}
    }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  /* A COND_EXPR whose arms have equal lengths folds to that length,
     provided evaluating the condition has no side-effects we'd skip.  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) the length is that of e2, again guarding against
     dropped side-effects in e1.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  if (EXPR_HAS_LOCATION (src))
    loc = EXPR_LOCATION (src);
  else
    loc = input_location;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  /* MAX is the last array index, i.e. the length if the array holds
     exactly the string plus its terminating NUL.  */
  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
518 /* Return a char pointer for a C string if it is a string constant
519 or sum of string constant and integer constant. */
521 static const char *
522 c_getstr (tree src)
524 tree offset_node;
526 src = string_constant (src, &offset_node);
527 if (src == 0)
528 return 0;
530 if (offset_node == 0)
531 return TREE_STRING_POINTER (src);
532 else if (!host_integerp (offset_node, 1)
533 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
534 return 0;
536 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  /* c[0]/c[1] accumulate the low and high host words of the value.  */
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map host byte index I to the target bit offset J, honoring the
	 target's byte and word endianness.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      /* CH goes to zero at the string's NUL and stays zero, so any
	 bytes past the terminator are read as zero padding.  */
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
572 /* Cast a target constant CST to target CHAR and if that value fits into
573 host char type, return zero and put that value into variable pointed to by
574 P. */
576 static int
577 target_char_cast (tree cst, char *p)
579 unsigned HOST_WIDE_INT val, hostval;
581 if (!host_integerp (cst, 1)
582 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
583 return 1;
585 val = tree_low_cst (cst, 1);
586 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
587 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
589 hostval = val;
590 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
591 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
593 if (val != hostval)
594 return 1;
596 *p = hostval;
597 return 0;
600 /* Similar to save_expr, but assumes that arbitrary code is not executed
601 in between the multiple evaluations. In particular, we assume that a
602 non-addressable local variable will not be modified. */
604 static tree
605 builtin_save_expr (tree exp)
607 if (TREE_ADDRESSABLE (exp) == 0
608 && (TREE_CODE (exp) == PARM_DECL
609 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
610 return exp;
612 return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.
     Without a target hook, assume it sits one word above the frame
     address.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  Lazily allocated (-1 means "not
   yet created") by the setjmp/longjmp expanders below.  */
static alias_set_type setjmp_alias_set = -1;
702 /* Construct the leading half of a __builtin_setjmp call. Control will
703 return to RECEIVER_LABEL. This is also called directly by the SJLJ
704 exception handling code. */
706 void
707 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
709 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
710 rtx stack_save;
711 rtx mem;
713 if (setjmp_alias_set == -1)
714 setjmp_alias_set = new_alias_set ();
716 buf_addr = convert_memory_address (Pmode, buf_addr);
718 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
720 /* We store the frame pointer and the address of receiver_label in
721 the buffer and use the rest of it for the stack save area, which
722 is machine-dependent. */
724 mem = gen_rtx_MEM (Pmode, buf_addr);
725 set_mem_alias_set (mem, setjmp_alias_set);
726 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
728 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
729 set_mem_alias_set (mem, setjmp_alias_set);
731 emit_move_insn (validize_mem (mem),
732 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
734 stack_save = gen_rtx_MEM (sa_mode,
735 plus_constant (buf_addr,
736 2 * GET_MODE_SIZE (Pmode)));
737 set_mem_alias_set (stack_save, setjmp_alias_set);
738 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
740 /* If there is further processing to do, do it. */
741 #ifdef HAVE_builtin_setjmp_setup
742 if (HAVE_builtin_setjmp_setup)
743 emit_insn (gen_builtin_setjmp_setup (buf_addr));
744 #endif
746 /* Tell optimize_save_area_alloca that extra work is going to
747 need to go on during alloca. */
748 cfun->calls_setjmp = 1;
750 /* We have a nonlocal label. */
751 cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  Only done when the target has no
     nonlocal_goto pattern of its own.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
	 apparent to early optimization passes, so force a clobber.  */
      emit_clobber (hard_frame_pointer_rtx);
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Prefer a target-specific receiver pattern; the two #ifdef blocks
     chain through the dangling `else' so at most one pattern is
     emitted.  */
#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      /* Buffer layout matches expand_builtin_setjmp_setup: word 0 is
	 the frame pointer, word 1 the label, word 2 onward the stack
	 save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  /* Exactly two pointer arguments are required; otherwise return
     NULL_RTX so the caller emits a normal library call instead.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was based
     on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area lays out the frame pointer in its first Pmode-sized
     word and the saved stack pointer immediately after it.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    /* No target pattern: open-code the jump.  Load the label first,
       since after the frame pointer is replaced below, accessing
       frame-based values would be wrong.  */
    r_label = copy_to_reg (r_label);

    /* Tell the optimizers that memory and the frame are clobbered
       across this transfer of control.  */
    emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
    emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

    /* Restore frame pointer for containing function.
       This sets the actual hard register used for the frame pointer
       to the location of the function's incoming static chain info.
       The non-local goto handler will then adjust it to contain the
       proper value and reload the argument pointer, if needed.  */
    emit_move_insn (hard_frame_pointer_rtx, r_fp);
    emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

    /* USE of hard_frame_pointer_rtx added for consistency;
       not clear if really needed.  */
    emit_use (hard_frame_pointer_rtx);
    emit_use (stack_pointer_rtx);

    /* If the architecture is using a GP register, we must
       conservatively assume that the target function makes use of it.
       The prologue of functions with nonlocal gotos must therefore
       initialize the GP register to the appropriate value, and we
       must then make sure that this value is live at the point
       of the jump.  (Note that this doesn't necessarily apply
       to targets with a nonlocal_goto pattern; they are free
       to implement it in their own way.  Note also that this is
       a no-op if the GP register is a global invariant.)  */
    if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	&& fixed_regs[PIC_OFFSET_TABLE_REGNUM])
      emit_use (pic_offset_table_rtx);

    emit_indirect_jump (r_label);

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    if (JUMP_P (insn))
      add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
      break;
    else if (CALL_P (insn))
      break;

  return const0_rtx;
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

  /* Pick the mode the target uses to save the stack pointer for
     nonlocal saves; STACK_SAVEAREA_MODE, when defined, takes
     precedence over the save_stack_nonlocal insn's operand mode.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  /* The saved stack pointer occupies the third Pmode-sized slot of the
     buffer, after the frame pointer and the resume label.  */
  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  /* Store the current stack pointer into the buffer slot.  */
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  /* At least the address argument (a pointer) is required.  */
  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.
     On bad input, diagnose and fall back to the default of zero.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    error ("second argument to %<__builtin_prefetch%> must be a constant");
    arg1 = integer_zero_node;
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	     " using zero");
    op1 = const0_rtx;

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    error ("third argument to %<__builtin_prefetch%> must be a constant");
    arg2 = integer_zero_node;
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
    op2 = const0_rtx;

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    /* Coerce the address into a form the prefetch insn's operand 0
       predicate accepts before emitting the insn.  */
    if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	 (op0,
	  insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	|| (GET_MODE (op0) != Pmode))
      op0 = convert_memory_address (Pmode, op0);
      op0 = force_reg (Pmode, op0);
    emit_insn (gen_prefetch (op0, op1, op2));
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
  tree orig_exp = exp;
  rtx addr, mem;
  HOST_WIDE_INT off;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the original (possibly wrapped) expression for the address;
     EXP from here on is used only to derive memory attributes.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* OFF records a constant byte offset peeled off a &obj + CST address,
     so the attributes can be taken from OBJ and re-applied at OFF.  */
  off = 0;
  if (TREE_CODE (exp) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && host_integerp (TREE_OPERAND (exp, 1), 0)
      && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
    exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  else if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    set_mem_attributes (mem, exp, 0);

  if (off)
    mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);

  /* Allow the string and memory builtins to overflow from one
     field into another, see http://gcc.gnu.org/PR23561.
     Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
     memory accessed by the string or memory builtin will fit
     within the field.  */
  if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
    tree mem_expr = MEM_EXPR (mem);
    HOST_WIDE_INT offset = -1, length = -1;
    tree inner = exp;

    /* Strip wrappers down to the innermost COMPONENT_REF.  */
    while (TREE_CODE (inner) == ARRAY_REF
	   || CONVERT_EXPR_P (inner)
	   || TREE_CODE (inner) == VIEW_CONVERT_EXPR
	   || TREE_CODE (inner) == SAVE_EXPR)
      inner = TREE_OPERAND (inner, 0);

    gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

    if (MEM_OFFSET (mem)
	&& CONST_INT_P (MEM_OFFSET (mem)))
      offset = INTVAL (MEM_OFFSET (mem));

    if (offset >= 0 && len && host_integerp (len, 0))
      length = tree_low_cst (len, 0);

    /* Walk outward through nested COMPONENT_REFs.  At each level,
       either prove the access [offset, offset+length) fits in the
       field (and keep the COMPONENT_REF), or strip one level off
       MEM_EXPR and accumulate the field offset.  */
    while (TREE_CODE (inner) == COMPONENT_REF)
      tree field = TREE_OPERAND (inner, 1);
      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
      gcc_assert (field == TREE_OPERAND (mem_expr, 1));

      /* Bitfields are generally not byte-addressable.  */
      gcc_assert (!DECL_BIT_FIELD (field)
		  || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		       % BITS_PER_UNIT) == 0
		      && host_integerp (DECL_SIZE (field), 0)
		      && (TREE_INT_CST_LOW (DECL_SIZE (field))
			  % BITS_PER_UNIT) == 0));

      /* If we can prove that the memory starting at XEXP (mem, 0) and
	 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
	 can keep the COMPONENT_REF in MEM_EXPR.  But be careful with
	 fields without DECL_SIZE_UNIT like flexible array members.  */
      if (length >= 0
	  && DECL_SIZE_UNIT (field)
	  && host_integerp (DECL_SIZE_UNIT (field), 0))
	HOST_WIDE_INT size
	  = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
	if (offset <= size
	    && length <= size
	    && offset + length <= size)
	  break;

      /* Re-express OFFSET relative to the containing record.  */
      if (offset >= 0
	  && host_integerp (DECL_FIELD_OFFSET (field), 0))
	offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
		  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		    / BITS_PER_UNIT;
      else
	offset = -1;
	length = -1;

      mem_expr = TREE_OPERAND (mem_expr, 0);
      inner = TREE_OPERAND (inner, 0);

    if (mem_expr == NULL)
      offset = -1;
    if (mem_expr != MEM_EXPR (mem))
      set_mem_expr (mem, mem_expr);
      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);

  /* Clear alias set and size: string builtins may touch anything and
     may span multiple elements (see the comment above).  */
  set_mem_alias_set (mem, 0);
  set_mem_size (mem, NULL_RTX);

  return mem;
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size, below.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size, below.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
  /* The computed size is cached across calls; -1 means "not yet
     computed".  */
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    /* The first value is the incoming arg-pointer.  */
    size = GET_MODE_SIZE (Pmode);

    /* The second value is the structure value address unless this is
       passed as an "invisible" first argument.  */
    if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
      size += GET_MODE_SIZE (Pmode);

    /* Lay out a slot for every register that can carry an argument,
       aligning each slot to its mode's alignment, and record the mode
       chosen for each register.  */
    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
      if (FUNCTION_ARG_REGNO_P (regno))
	mode = reg_raw_mode[regno];

	gcc_assert (mode != VOIDmode);

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	size += GET_MODE_SIZE (mode);
	apply_args_mode[regno] = mode;
      else
	apply_args_mode[regno] = VOIDmode;

  return size;
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
  /* Cached across calls; -1 means "not yet computed".  */
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    size = 0;

    /* Lay out a slot for every register that can carry a return value,
       aligning each slot to its mode's alignment, and record the mode
       chosen for each register.  */
    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
      if (FUNCTION_VALUE_REGNO_P (regno))
	mode = reg_raw_mode[regno];

	gcc_assert (mode != VOIDmode);

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	size += GET_MODE_SIZE (mode);
	apply_result_mode[regno] = mode;
      else
	apply_result_mode[regno] = VOIDmode;

    /* Allow targets that use untyped_call and untyped_return to override
       the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
    size = APPLY_RESULT_SIZE;
#endif

  return size;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  /* Build one SET per return register, using the slot layout computed
     by apply_result_size: reg->mem when saving, mem->reg when
     restoring.  */
  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (size % align != 0)
	size = CEIL (size, align) * align;
      /* When restoring, the values arrive in the incoming registers.  */
      reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
      mem = adjust_address (result, mode, size);
      savevec[nelts++] = (savep
			  ? gen_rtx_SET (VOIDmode, mem, reg)
			  : gen_rtx_SET (VOIDmode, reg, mem));
      size += GET_MODE_SIZE (mode);
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (size % align != 0)
	size = CEIL (size, align) * align;

      tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

      emit_move_insn (adjust_address (registers, mode, size), tem);
      size += GET_MODE_SIZE (mode);

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  /* NOTE(review): the assignment target line is missing from this view;
     presumably it is TEM being reassigned — confirm against upstream.  */
    = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
		     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));
    size += GET_MODE_SIZE (Pmode);

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the
     call to the first insn of this function.  */
  rtx temp;
  rtx seq;

  /* Generate the register-saving code into a detached sequence so it
     can be emitted at function entry rather than at the call site.  */
  start_sequence ();
  temp = expand_builtin_apply_args_1 ();
  seq = get_insns ();
  end_sequence ();

  apply_args_value = temp;

  /* Put the insns after the NOTE that starts the function.
     If this is inside a start_sequence, make the outer-level insn
     chain current, so the code is placed at the start of the
     function.  If internal_arg_pointer is a non-virtual pseudo,
     it needs to be placed after the function that initializes
     that pseudo.  */
  push_topmost_sequence ();
  if (REG_P (crtl->args.internal_arg_pointer)
      && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
    emit_insn_before (seq, parm_birth_insn);
  else
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
  pop_topmost_sequence ();
  return temp;
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (CONST_INT_P (argsize))
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  /* Block-copy the caller's stacked arguments into the new block.  */
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (size % align != 0)
	size = CEIL (size, align) * align;
      reg = gen_rtx_REG (mode, regno);
      emit_move_insn (reg, adjust_address (arguments, mode, size));
      use_reg (&call_fusage, reg);
      size += GET_MODE_SIZE (mode);

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    rtx value = gen_reg_rtx (Pmode);
    emit_move_insn (value, adjust_address (arguments, Pmode, size));
    emit_move_insn (struct_value, value);
    if (REG_P (struct_value))
      use_reg (&call_fusage, struct_value);
    size += GET_MODE_SIZE (Pmode);

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    rtx valreg = 0;

    /* Locate the unique return register.  It is not possible to
       express a call that sets more than one return register using
       call_value; use untyped_call for that.  In fact, untyped_call
       only needs to save the return registers in the given block.  */
    for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
      if ((mode = apply_result_mode[regno]) != VOIDmode)
	gcc_assert (!valreg); /* HAVE_untyped_call required.  */

	valreg = gen_rtx_REG (mode, regno);

    emit_call_insn (GEN_CALL_VALUE (valreg,
				    gen_rtx_MEM (FUNCTION_MODE, function),
				    const0_rtx, NULL_RTX, const0_rtx));

    emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
/* Perform an untyped return.  RESULT is the address of a result block
   previously filled in by __builtin_apply.  */

static void
expand_builtin_return (rtx result)
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Ensure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
    emit_barrier ();
    return;
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
      if (size % align != 0)
	size = CEIL (size, align) * align;
      reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
      emit_move_insn (reg, adjust_address (result, mode, size));

      /* Accumulate USEs of the restored registers so they stay live
	 until the return.  */
      push_to_sequence (call_fusage);
      emit_use (reg);
      call_fusage = get_insns ();
      end_sequence ();
      size += GET_MODE_SIZE (mode);

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.
   Map a front-end tree type code to the corresponding __builtin_classify_type
   result value; unknown codes map to no_type_class.  */

static enum type_class
type_to_class (tree type)
  switch (TREE_CODE (type))
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    /* Character arrays (TYPE_STRING_FLAG) classify as strings.  */
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
1728 /* Expand a call EXP to __builtin_classify_type. */
1730 static rtx
1731 expand_builtin_classify_type (tree exp)
1733 if (call_expr_nargs (exp))
1734 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1735 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  The macro expands
   to the three case labels and sets the locals FCODE/FCODEF/FCODEL
   declared in the enclosing function.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r/lgammaf_r/lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
/* Return mathematic function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true find the function in
   implicit_built_in_decls[], otherwise use built_in_decls[].  If we
   can't do the conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
  tree const *const fn_arr
    = implicit ? implicit_built_in_decls : built_in_decls;
  enum built_in_function fcode, fcodef, fcodel;

  /* Each CASE_MATHFN expansion matches the double/float/long-double
     variants of one function and records all three codes; the TYPE
     check below then selects the right one.  */
  switch (fn)
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;

  /* Select the variant whose operand type matches TYPE; any other
     type (e.g. decimal float) yields no equivalent.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fn_arr[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fn_arr[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fn_arr[fcodel];
  else
    return NULL_TREE;
1864 /* Like mathfn_built_in_1(), but always use the implicit array. */
1866 tree
1867 mathfn_built_in (tree type, enum built_in_function fn)
1869 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  (x == x is false only
     for NaN, so the branch to LAB is taken for non-NaN results.)  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
#ifdef GEN_ERRNO_RTX
    rtx errno_rtx = GEN_ERRNO_RTX;
#else
    rtx errno_rtx
      = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
    emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
    emit_label (lab);
    return;
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
1915 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1916 Return NULL_RTX if a normal call should be emitted rather than expanding
1917 the function in-line. EXP is the expression that is a call to the builtin
1918 function; if convenient, the result should be placed in TARGET.
1919 SUBTARGET may be used as the target for computing one of EXP's operands. */
1921 static rtx
1922 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1924 optab builtin_optab;
1925 rtx op0, insns;
1926 tree fndecl = get_callee_fndecl (exp);
1927 enum machine_mode mode;
1928 bool errno_set = false;
1929 tree arg;
1931 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1932 return NULL_RTX;
1934 arg = CALL_EXPR_ARG (exp, 0);
/* Pick the optab for this builtin and note whether the library function
   could set errno (in which case an inline NaN check is needed).  */
1936 switch (DECL_FUNCTION_CODE (fndecl))
/* sqrt only sets EDOM when its argument may be negative.  */
1938 CASE_FLT_FN (BUILT_IN_SQRT):
1939 errno_set = ! tree_expr_nonnegative_p (arg);
1940 builtin_optab = sqrt_optab;
1941 break;
1942 CASE_FLT_FN (BUILT_IN_EXP):
1943 errno_set = true; builtin_optab = exp_optab; break;
1944 CASE_FLT_FN (BUILT_IN_EXP10):
1945 CASE_FLT_FN (BUILT_IN_POW10):
1946 errno_set = true; builtin_optab = exp10_optab; break;
1947 CASE_FLT_FN (BUILT_IN_EXP2):
1948 errno_set = true; builtin_optab = exp2_optab; break;
1949 CASE_FLT_FN (BUILT_IN_EXPM1):
1950 errno_set = true; builtin_optab = expm1_optab; break;
1951 CASE_FLT_FN (BUILT_IN_LOGB):
1952 errno_set = true; builtin_optab = logb_optab; break;
1953 CASE_FLT_FN (BUILT_IN_LOG):
1954 errno_set = true; builtin_optab = log_optab; break;
1955 CASE_FLT_FN (BUILT_IN_LOG10):
1956 errno_set = true; builtin_optab = log10_optab; break;
1957 CASE_FLT_FN (BUILT_IN_LOG2):
1958 errno_set = true; builtin_optab = log2_optab; break;
1959 CASE_FLT_FN (BUILT_IN_LOG1P):
1960 errno_set = true; builtin_optab = log1p_optab; break;
1961 CASE_FLT_FN (BUILT_IN_ASIN):
1962 builtin_optab = asin_optab; break;
1963 CASE_FLT_FN (BUILT_IN_ACOS):
1964 builtin_optab = acos_optab; break;
1965 CASE_FLT_FN (BUILT_IN_TAN):
1966 builtin_optab = tan_optab; break;
1967 CASE_FLT_FN (BUILT_IN_ATAN):
1968 builtin_optab = atan_optab; break;
1969 CASE_FLT_FN (BUILT_IN_FLOOR):
1970 builtin_optab = floor_optab; break;
1971 CASE_FLT_FN (BUILT_IN_CEIL):
1972 builtin_optab = ceil_optab; break;
1973 CASE_FLT_FN (BUILT_IN_TRUNC):
1974 builtin_optab = btrunc_optab; break;
1975 CASE_FLT_FN (BUILT_IN_ROUND):
1976 builtin_optab = round_optab; break;
1977 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1978 builtin_optab = nearbyint_optab;
1979 if (flag_trapping_math)
1980 break;
1981 /* Else fallthrough and expand as rint. */
1982 CASE_FLT_FN (BUILT_IN_RINT):
1983 builtin_optab = rint_optab; break;
1984 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
1985 builtin_optab = significand_optab; break;
1986 default:
1987 gcc_unreachable ();
1990 /* Make a suitable register to place result in. */
1991 mode = TYPE_MODE (TREE_TYPE (exp));
/* errno handling is pointless under -fno-math-errno, or when the mode
   has no NaNs with which expand_errno_check could detect the error.  */
1993 if (! flag_errno_math || ! HONOR_NANS (mode))
1994 errno_set = false;
1996 /* Before working hard, check whether the instruction is available. */
1997 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1999 target = gen_reg_rtx (mode);
2001 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2002 need to expand the argument again. This way, we will not perform
2003 side-effects more the once. */
2004 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2006 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2008 start_sequence ();
2010 /* Compute into TARGET.
2011 Set TARGET to wherever the result comes back. */
2012 target = expand_unop (mode, builtin_optab, op0, target, 0);
2014 if (target != 0)
2016 if (errno_set)
2017 expand_errno_check (exp, target);
2019 /* Output the entire sequence. */
2020 insns = get_insns ();
2021 end_sequence ();
2022 emit_insn (insns);
2023 return target;
2026 /* If we were unable to expand via the builtin, stop the sequence
2027 (without outputting the insns) and call to the library function
2028 with the stabilized argument list. */
2029 end_sequence ();
2032 return expand_call (exp, target, target == const0_rtx);
2035 /* Expand a call to the builtin binary math functions (pow and atan2).
2036 Return NULL_RTX if a normal call should be emitted rather than expanding the
2037 function in-line. EXP is the expression that is a call to the builtin
2038 function; if convenient, the result should be placed in TARGET.
2039 SUBTARGET may be used as the target for computing one of EXP's
2040 operands. */
2042 static rtx
2043 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2045 optab builtin_optab;
2046 rtx op0, op1, insns;
2047 int op1_type = REAL_TYPE;
2048 tree fndecl = get_callee_fndecl (exp);
2049 tree arg0, arg1;
2050 enum machine_mode mode;
2051 bool errno_set = true;
/* scalbn/scalbln/ldexp take an integer second argument; everything
   else handled here takes two reals.  */
2053 switch (DECL_FUNCTION_CODE (fndecl))
2055 CASE_FLT_FN (BUILT_IN_SCALBN):
2056 CASE_FLT_FN (BUILT_IN_SCALBLN):
2057 CASE_FLT_FN (BUILT_IN_LDEXP):
2058 op1_type = INTEGER_TYPE;
/* Fall through.  */
2059 default:
2060 break;
2063 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2064 return NULL_RTX;
2066 arg0 = CALL_EXPR_ARG (exp, 0);
2067 arg1 = CALL_EXPR_ARG (exp, 1);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_POW):
2072 builtin_optab = pow_optab; break;
2073 CASE_FLT_FN (BUILT_IN_ATAN2):
2074 builtin_optab = atan2_optab; break;
/* scalb variants are only expandable when the mode's radix is 2;
   otherwise fall back to a library call.  */
2075 CASE_FLT_FN (BUILT_IN_SCALB):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2077 return 0;
2078 builtin_optab = scalb_optab; break;
2079 CASE_FLT_FN (BUILT_IN_SCALBN):
2080 CASE_FLT_FN (BUILT_IN_SCALBLN):
2081 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2082 return 0;
2083 /* Fall through... */
2084 CASE_FLT_FN (BUILT_IN_LDEXP):
2085 builtin_optab = ldexp_optab; break;
2086 CASE_FLT_FN (BUILT_IN_FMOD):
2087 builtin_optab = fmod_optab; break;
2088 CASE_FLT_FN (BUILT_IN_REMAINDER):
2089 CASE_FLT_FN (BUILT_IN_DREM):
2090 builtin_optab = remainder_optab; break;
2091 default:
2092 gcc_unreachable ();
2095 /* Make a suitable register to place result in. */
2096 mode = TYPE_MODE (TREE_TYPE (exp));
2098 /* Before working hard, check whether the instruction is available. */
2099 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2100 return NULL_RTX;
2102 target = gen_reg_rtx (mode);
/* No errno check needed under -fno-math-errno or when the mode has
   no NaNs to flag the error with.  */
2104 if (! flag_errno_math || ! HONOR_NANS (mode))
2105 errno_set = false;
2107 /* Always stabilize the argument list. */
2108 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2109 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2111 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2112 op1 = expand_normal (arg1);
2114 start_sequence ();
2116 /* Compute into TARGET.
2117 Set TARGET to wherever the result comes back. */
2118 target = expand_binop (mode, builtin_optab, op0, op1,
2119 target, 0, OPTAB_DIRECT);
2121 /* If we were unable to expand via the builtin, stop the sequence
2122 (without outputting the insns) and call to the library function
2123 with the stabilized argument list. */
2124 if (target == 0)
2126 end_sequence ();
2127 return expand_call (exp, target, target == const0_rtx);
2130 if (errno_set)
2131 expand_errno_check (exp, target);
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2138 return target;
2141 /* Expand a call to the builtin sin and cos math functions.
2142 Return NULL_RTX if a normal call should be emitted rather than expanding the
2143 function in-line. EXP is the expression that is a call to the builtin
2144 function; if convenient, the result should be placed in TARGET.
2145 SUBTARGET may be used as the target for computing one of EXP's
2146 operands. */
2148 static rtx
2149 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2151 optab builtin_optab;
2152 rtx op0, insns;
2153 tree fndecl = get_callee_fndecl (exp);
2154 enum machine_mode mode;
2155 tree arg;
2157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2158 return NULL_RTX;
2160 arg = CALL_EXPR_ARG (exp, 0);
/* Prefer the combined sincos pattern for both sin and cos.  */
2162 switch (DECL_FUNCTION_CODE (fndecl))
2164 CASE_FLT_FN (BUILT_IN_SIN):
2165 CASE_FLT_FN (BUILT_IN_COS):
2166 builtin_optab = sincos_optab; break;
2167 default:
2168 gcc_unreachable ();
2171 /* Make a suitable register to place result in. */
2172 mode = TYPE_MODE (TREE_TYPE (exp));
2174 /* Check if sincos insn is available, otherwise fallback
2175 to sin or cos insn. */
2176 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 builtin_optab = sin_optab; break;
2181 CASE_FLT_FN (BUILT_IN_COS):
2182 builtin_optab = cos_optab; break;
2183 default:
2184 gcc_unreachable ();
2187 /* Before working hard, check whether the instruction is available. */
2188 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2190 target = gen_reg_rtx (mode);
2192 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2193 need to expand the argument again. This way, we will not perform
2194 side-effects more the once. */
2195 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2197 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2199 start_sequence ();
2201 /* Compute into TARGET.
2202 Set TARGET to wherever the result comes back. */
2203 if (builtin_optab == sincos_optab)
2205 int result;
/* The sincos pattern produces two values; request only the output
   slot we need (the second for sin, the first for cos — matching
   expand_builtin_sincos below).  */
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2211 break;
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2214 break;
2215 default:
2216 gcc_unreachable ();
2218 gcc_assert (result);
2220 else
2222 target = expand_unop (mode, builtin_optab, op0, target, 0);
2225 if (target != 0)
2227 /* Output the entire sequence. */
2228 insns = get_insns ();
2229 end_sequence ();
2230 emit_insn (insns);
2231 return target;
2234 /* If we were unable to expand via the builtin, stop the sequence
2235 (without outputting the insns) and call to the library function
2236 with the stabilized argument list. */
2237 end_sequence ();
2240 target = expand_call (exp, target, target == const0_rtx);
2242 return target;
2245 /* Given an interclass math builtin decl FNDECL and its argument ARG
2246 return an RTL instruction code that implements the functionality.
2247 If that isn't possible or available return CODE_FOR_nothing. */
2249 static enum insn_code
2250 interclass_mathfn_icode (tree arg, tree fndecl)
2252 bool errno_set = false;
2253 optab builtin_optab = 0;
2254 enum machine_mode mode;
2256 switch (DECL_FUNCTION_CODE (fndecl))
2258 CASE_FLT_FN (BUILT_IN_ILOGB):
2259 errno_set = true; builtin_optab = ilogb_optab; break;
2260 CASE_FLT_FN (BUILT_IN_ISINF):
2261 builtin_optab = isinf_optab; break;
2262 case BUILT_IN_ISNORMAL:
2263 case BUILT_IN_ISFINITE:
2264 CASE_FLT_FN (BUILT_IN_FINITE):
2265 case BUILT_IN_FINITED32:
2266 case BUILT_IN_FINITED64:
2267 case BUILT_IN_FINITED128:
2268 case BUILT_IN_ISINFD32:
2269 case BUILT_IN_ISINFD64:
2270 case BUILT_IN_ISINFD128:
2271 /* These builtins have no optabs (yet). */
2272 break;
2273 default:
2274 gcc_unreachable ();
2277 /* There's no easy way to detect the case we need to set EDOM. */
/* Refuse to expand inline when errno semantics are required.  */
2278 if (flag_errno_math && errno_set)
2279 return CODE_FOR_nothing;
2281 /* Optab mode depends on the mode of the input argument. */
2282 mode = TYPE_MODE (TREE_TYPE (arg));
2284 if (builtin_optab)
2285 return optab_handler (builtin_optab, mode)->insn_code;
2286 return CODE_FOR_nothing;
2289 /* Expand a call to one of the builtin math functions that operate on
2290 floating point argument and output an integer result (ilogb, isinf,
2291 isnan, etc).
2292 Return 0 if a normal call should be emitted rather than expanding the
2293 function in-line. EXP is the expression that is a call to the builtin
2294 function; if convenient, the result should be placed in TARGET.
2295 SUBTARGET may be used as the target for computing one of EXP's operands. */
2297 static rtx
2298 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2300 enum insn_code icode = CODE_FOR_nothing;
2301 rtx op0;
2302 tree fndecl = get_callee_fndecl (exp);
2303 enum machine_mode mode;
2304 tree arg;
2306 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2307 return NULL_RTX;
2309 arg = CALL_EXPR_ARG (exp, 0);
2310 icode = interclass_mathfn_icode (arg, fndecl);
/* MODE is the mode of the floating point input; the result mode is
   taken from the call's (integer) type below.  */
2311 mode = TYPE_MODE (TREE_TYPE (arg));
2313 if (icode != CODE_FOR_nothing)
2315 /* Make a suitable register to place result in. */
2316 if (!target
2317 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2320 gcc_assert (insn_data[icode].operand[0].predicate
2321 (target, GET_MODE (target)));
2323 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2324 need to expand the argument again. This way, we will not perform
2325 side-effects more the once. */
2326 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2328 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2330 if (mode != GET_MODE (op0))
2331 op0 = convert_to_mode (mode, op0, 0);
2333 /* Compute into TARGET.
2334 Set TARGET to wherever the result comes back. */
2335 emit_unop_insn (icode, target, op0, UNKNOWN);
2336 return target;
2339 return NULL_RTX;
2342 /* Expand a call to the builtin sincos math function.
2343 Return NULL_RTX if a normal call should be emitted rather than expanding the
2344 function in-line. EXP is the expression that is a call to the builtin
2345 function. */
2347 static rtx
2348 expand_builtin_sincos (tree exp)
2350 rtx op0, op1, op2, target1, target2;
2351 enum machine_mode mode;
2352 tree arg, sinp, cosp;
2353 int result;
2354 location_t loc = EXPR_LOCATION (exp);
2356 if (!validate_arglist (exp, REAL_TYPE,
2357 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2358 return NULL_RTX;
/* sincos (ARG, SINP, COSP): SINP and COSP are output pointers.  */
2360 arg = CALL_EXPR_ARG (exp, 0);
2361 sinp = CALL_EXPR_ARG (exp, 1);
2362 cosp = CALL_EXPR_ARG (exp, 2);
2364 /* Make a suitable register to place result in. */
2365 mode = TYPE_MODE (TREE_TYPE (arg));
2367 /* Check if sincos insn is available, otherwise emit the call. */
2368 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2369 return NULL_RTX;
2371 target1 = gen_reg_rtx (mode);
2372 target2 = gen_reg_rtx (mode);
2374 op0 = expand_normal (arg);
/* OP1 and OP2 are the lvalues *SINP and *COSP, expanded to memory.  */
2375 op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
2376 op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));
2378 /* Compute into target1 and target2.
2379 Set TARGET to wherever the result comes back. */
/* TARGET1 receives the second sincos output (sin), TARGET2 the first
   (cos) — same slot convention as expand_builtin_mathfn_3.  */
2380 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2381 gcc_assert (result);
2383 /* Move target1 and target2 to the memory locations indicated
2384 by op1 and op2. */
2385 emit_move_insn (op1, target1);
2386 emit_move_insn (op2, target2);
/* sincos returns void; expansion succeeded.  */
2388 return const0_rtx;
2391 /* Expand a call to the internal cexpi builtin to the sincos math function.
2392 EXP is the expression that is a call to the builtin function; if convenient,
2393 the result should be placed in TARGET. SUBTARGET may be used as the target
2394 for computing one of EXP's operands. */
/* Three strategies, in order of preference: (1) the sincos optab,
   (2) a call to the sincos library function, (3) a call to cexp with
   a pure-imaginary argument.  */
2396 static rtx
2397 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2399 tree fndecl = get_callee_fndecl (exp);
2400 tree arg, type;
2401 enum machine_mode mode;
2402 rtx op0, op1, op2;
2403 location_t loc = EXPR_LOCATION (exp);
2405 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2406 return NULL_RTX;
2408 arg = CALL_EXPR_ARG (exp, 0);
2409 type = TREE_TYPE (arg);
2410 mode = TYPE_MODE (TREE_TYPE (arg));
2412 /* Try expanding via a sincos optab, fall back to emitting a libcall
2413 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2414 is only generated from sincos, cexp or if we have either of them. */
2415 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2417 op1 = gen_reg_rtx (mode);
2418 op2 = gen_reg_rtx (mode);
2420 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2422 /* Compute into op1 and op2. */
2423 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2425 else if (TARGET_HAS_SINCOS)
2427 tree call, fn = NULL_TREE;
2428 tree top1, top2;
2429 rtx op1a, op2a;
/* Select the sincos decl matching the cexpi variant's precision.  */
2431 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2432 fn = built_in_decls[BUILT_IN_SINCOSF];
2433 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2434 fn = built_in_decls[BUILT_IN_SINCOS];
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2436 fn = built_in_decls[BUILT_IN_SINCOSL];
2437 else
2438 gcc_unreachable ();
/* Stack temporaries receive sin and cos; pass their addresses.  */
2440 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2441 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2442 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2443 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2444 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2445 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2447 /* Make sure not to fold the sincos call again. */
2448 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2449 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2450 call, 3, arg, top1, top2));
2452 else
2454 tree call, fn = NULL_TREE, narg;
2455 tree ctype = build_complex_type (type);
2457 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2458 fn = built_in_decls[BUILT_IN_CEXPF];
2459 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2460 fn = built_in_decls[BUILT_IN_CEXP];
2461 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2462 fn = built_in_decls[BUILT_IN_CEXPL];
2463 else
2464 gcc_unreachable ();
2466 /* If we don't have a decl for cexp create one. This is the
2467 friendliest fallback if the user calls __builtin_cexpi
2468 without full target C99 function support. */
2469 if (fn == NULL_TREE)
2471 tree fntype;
2472 const char *name = NULL;
2474 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2475 name = "cexpf";
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2477 name = "cexp";
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2479 name = "cexpl";
2481 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2482 fn = build_fn_decl (name, fntype);
/* cexpi(x) == cexp(0 + x*i): build the complex argument 0+ARG*i.  */
2485 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2486 build_real (type, dconst0), arg);
2488 /* Make sure not to fold the cexp call again. */
2489 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2490 return expand_expr (build_call_nary (ctype, call, 1, narg),
2491 target, VOIDmode, EXPAND_NORMAL);
2494 /* Now build the proper return type. */
/* Assemble the complex result cos + sin*i from OP2 and OP1.  */
2495 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2496 make_tree (TREE_TYPE (arg), op2),
2497 make_tree (TREE_TYPE (arg), op1)),
2498 target, VOIDmode, EXPAND_NORMAL);
2501 /* Conveniently construct a function call expression. FNDECL names the
2502 function to be called, N is the number of arguments, and the "..."
2503 parameters are the argument expressions. Unlike build_call_expr
2504 this doesn't fold the call, hence it will always return a CALL_EXPR.
2505 LOC is the source location to attach to the resulting call. */
2506 static tree
2507 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2509 va_list ap;
2510 tree fntype = TREE_TYPE (fndecl);
2511 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2513 va_start (ap, n);
2514 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2515 va_end (ap);
2516 SET_EXPR_LOCATION (fn, loc);
2517 return fn;
/* Shorthand that builds the call with no particular source location. */
2519 #define build_call_nofold(...) \
2520 build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
2522 /* Expand a call to one of the builtin rounding functions gcc defines
2523 as an extension (lfloor and lceil). As these are gcc extensions we
2524 do not need to worry about setting errno to EDOM.
2525 If expanding via optab fails, lower expression to (int)(floor(x)).
2526 EXP is the expression that is a call to the builtin function;
2527 if convenient, the result should be placed in TARGET. */
2529 static rtx
2530 expand_builtin_int_roundingfn (tree exp, rtx target)
2532 convert_optab builtin_optab;
2533 rtx op0, insns, tmp;
2534 tree fndecl = get_callee_fndecl (exp);
2535 enum built_in_function fallback_fn;
2536 tree fallback_fndecl;
2537 enum machine_mode mode;
2538 tree arg;
2540 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2541 gcc_unreachable ();
2543 arg = CALL_EXPR_ARG (exp, 0);
/* Pick the combined float->int optab, remembering which plain FP
   rounding builtin to fall back on if the optab is unavailable.  */
2545 switch (DECL_FUNCTION_CODE (fndecl))
2547 CASE_FLT_FN (BUILT_IN_LCEIL):
2548 CASE_FLT_FN (BUILT_IN_LLCEIL):
2549 builtin_optab = lceil_optab;
2550 fallback_fn = BUILT_IN_CEIL;
2551 break;
2553 CASE_FLT_FN (BUILT_IN_LFLOOR):
2554 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2555 builtin_optab = lfloor_optab;
2556 fallback_fn = BUILT_IN_FLOOR;
2557 break;
2559 default:
2560 gcc_unreachable ();
2563 /* Make a suitable register to place result in. */
2564 mode = TYPE_MODE (TREE_TYPE (exp));
2566 target = gen_reg_rtx (mode);
2568 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2569 need to expand the argument again. This way, we will not perform
2570 side-effects more the once. */
2571 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2573 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2575 start_sequence ();
2577 /* Compute into TARGET. */
2578 if (expand_sfix_optab (target, op0, builtin_optab))
2580 /* Output the entire sequence. */
2581 insns = get_insns ();
2582 end_sequence ();
2583 emit_insn (insns);
2584 return target;
2587 /* If we were unable to expand via the builtin, stop the sequence
2588 (without outputting the insns). */
2589 end_sequence ();
2591 /* Fall back to floating point rounding optab. */
2592 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2594 /* For non-C99 targets we may end up without a fallback fndecl here
2595 if the user called __builtin_lfloor directly. In this case emit
2596 a call to the floor/ceil variants nevertheless. This should result
2597 in the best user experience for not full C99 targets. */
2598 if (fallback_fndecl == NULL_TREE)
2600 tree fntype;
2601 const char *name = NULL;
2603 switch (DECL_FUNCTION_CODE (fndecl))
2605 case BUILT_IN_LCEIL:
2606 case BUILT_IN_LLCEIL:
2607 name = "ceil";
2608 break;
2609 case BUILT_IN_LCEILF:
2610 case BUILT_IN_LLCEILF:
2611 name = "ceilf";
2612 break;
2613 case BUILT_IN_LCEILL:
2614 case BUILT_IN_LLCEILL:
2615 name = "ceill";
2616 break;
2617 case BUILT_IN_LFLOOR:
2618 case BUILT_IN_LLFLOOR:
2619 name = "floor";
2620 break;
2621 case BUILT_IN_LFLOORF:
2622 case BUILT_IN_LLFLOORF:
2623 name = "floorf";
2624 break;
2625 case BUILT_IN_LFLOORL:
2626 case BUILT_IN_LLFLOORL:
2627 name = "floorl";
2628 break;
2629 default:
2630 gcc_unreachable ();
2633 fntype = build_function_type_list (TREE_TYPE (arg),
2634 TREE_TYPE (arg), NULL_TREE);
2635 fallback_fndecl = build_fn_decl (name, fntype);
/* Emit floor/ceil on the FP argument, then convert to integer.  */
2638 exp = build_call_nofold (fallback_fndecl, 1, arg);
2640 tmp = expand_normal (exp);
2642 /* Truncate the result of floating point optab to integer
2643 via expand_fix (). */
2644 target = gen_reg_rtx (mode);
2645 expand_fix (target, tmp, 0);
2647 return target;
2650 /* Expand a call to one of the builtin math functions doing integer
2651 conversion (lrint).
2652 Return 0 if a normal call should be emitted rather than expanding the
2653 function in-line. EXP is the expression that is a call to the builtin
2654 function; if convenient, the result should be placed in TARGET. */
2656 static rtx
2657 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2659 convert_optab builtin_optab;
2660 rtx op0, insns;
2661 tree fndecl = get_callee_fndecl (exp);
2662 tree arg;
2663 enum machine_mode mode;
2665 /* There's no easy way to detect the case we need to set EDOM. */
/* lrint/lround may set errno on range errors, so only expand inline
   under -fno-math-errno.  */
2666 if (flag_errno_math)
2667 return NULL_RTX;
2669 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2670 gcc_unreachable ();
2672 arg = CALL_EXPR_ARG (exp, 0);
2674 switch (DECL_FUNCTION_CODE (fndecl))
2676 CASE_FLT_FN (BUILT_IN_LRINT):
2677 CASE_FLT_FN (BUILT_IN_LLRINT):
2678 builtin_optab = lrint_optab; break;
2679 CASE_FLT_FN (BUILT_IN_LROUND):
2680 CASE_FLT_FN (BUILT_IN_LLROUND):
2681 builtin_optab = lround_optab; break;
2682 default:
2683 gcc_unreachable ();
2686 /* Make a suitable register to place result in. */
2687 mode = TYPE_MODE (TREE_TYPE (exp));
2689 target = gen_reg_rtx (mode);
2691 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2692 need to expand the argument again. This way, we will not perform
2693 side-effects more the once. */
2694 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2696 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2698 start_sequence ();
2700 if (expand_sfix_optab (target, op0, builtin_optab))
2702 /* Output the entire sequence. */
2703 insns = get_insns ();
2704 end_sequence ();
2705 emit_insn (insns);
2706 return target;
2709 /* If we were unable to expand via the builtin, stop the sequence
2710 (without outputting the insns) and call to the library function
2711 with the stabilized argument list. */
2712 end_sequence ();
2714 target = expand_call (exp, target, target == const0_rtx);
2716 return target;
2719 /* To evaluate powi(x,n), the floating point value x raised to the
2720 constant integer exponent n, we use a hybrid algorithm that
2721 combines the "window method" with look-up tables. For an
2722 introduction to exponentiation algorithms and "addition chains",
2723 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2724 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2725 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2726 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2728 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2729 multiplications to inline before calling the system library's pow
2730 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2731 so this default never requires calling pow, powf or powl. */
2733 #ifndef POWI_MAX_MULTS
2734 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2735 #endif
2737 /* The size of the "optimal power tree" lookup table. All
2738 exponents less than this value are simply looked up in the
2739 powi_table below. This threshold is also used to size the
2740 cache of pseudo registers that hold intermediate results. */
2741 #define POWI_TABLE_SIZE 256
2743 /* The size, in bits of the window, used in the "window method"
2744 exponentiation algorithm. This is equivalent to a radix of
2745 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2746 #define POWI_WINDOW_SIZE 3
2748 /* The following table is an efficient representation of an
2749 "optimal power tree". For each value, i, the corresponding
2750 value, j, in the table states that an optimal evaluation
2751 sequence for calculating pow(x,i) can be found by evaluating
2752 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2753 100 integers is given in Knuth's "Seminumerical algorithms". */
2755 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2757 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2758 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2759 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2760 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2761 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2762 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2763 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2764 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2765 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2766 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2767 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2768 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2769 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2770 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2771 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2772 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2773 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2774 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2775 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2776 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2777 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2778 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2779 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2780 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2781 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2782 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2783 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2784 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2785 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2786 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2787 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2788 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2792 /* Return the number of multiplications required to calculate
2793 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2794 subroutine of powi_cost. CACHE is an array indicating
2795 which exponents have already been calculated. */
2797 static int
2798 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2800 /* If we've already calculated this exponent, then this evaluation
2801 doesn't require any additional multiplications. */
2802 if (cache[n])
2803 return 0;
2805 cache[n] = true;
2806 return powi_lookup_cost (n - powi_table[n], cache)
2807 + powi_lookup_cost (powi_table[n], cache) + 1;
2810 /* Return the number of multiplications required to calculate
2811 powi(x,n) for an arbitrary x, given the exponent N. This
2812 function needs to be kept in sync with expand_powi below. */
2814 static int
2815 powi_cost (HOST_WIDE_INT n)
2817 bool cache[POWI_TABLE_SIZE];
2818 unsigned HOST_WIDE_INT digit;
2819 unsigned HOST_WIDE_INT val;
2820 int result;
2822 if (n == 0)
2823 return 0;
2825 /* Ignore the reciprocal when calculating the cost. */
2826 val = (n < 0) ? -n : n;
2828 /* Initialize the exponent cache. */
2829 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2830 cache[1] = true;
2832 result = 0;
2834 while (val >= POWI_TABLE_SIZE)
2836 if (val & 1)
2838 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2839 result += powi_lookup_cost (digit, cache)
2840 + POWI_WINDOW_SIZE + 1;
2841 val >>= POWI_WINDOW_SIZE;
2843 else
2845 val >>= 1;
2846 result++;
2850 return result + powi_lookup_cost (val, cache);
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponent: reuse a previously computed power if we have one.  */
      if (cache[n])
	return cache[n];

      /* Record the register for this exponent BEFORE recursing, so the
	 recursive calls can share it through the cache.  */
      target = gen_reg_rtx (mode);
      cache[n] = target;

      /* powi_table encodes the addition-chain split for exponent N:
	 x**n = x**(n - powi_table[n]) * x**powi_table[n].  */
      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd large exponent: peel off the low POWI_WINDOW_SIZE bits and
	 multiply the two partial powers.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even large exponent: square x**(n/2).  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  /* expand_mult may place the product elsewhere; the cache already points
     at TARGET, so copy the value into it.  */
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2895 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2896 floating point operand in mode MODE, and N is the exponent. This
2897 function needs to be kept in sync with powi_cost above. */
2899 static rtx
2900 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2902 rtx cache[POWI_TABLE_SIZE];
2903 rtx result;
2905 if (n == 0)
2906 return CONST1_RTX (mode);
2908 memset (cache, 0, sizeof (cache));
2909 cache[1] = x;
2911 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2913 /* If the original exponent was negative, reciprocate the result. */
2914 if (n < 0)
2915 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2916 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2918 return result;
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant exponent gets no special treatment; use the generic
     two-argument math-function expander.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  Exponents -1..2 are always exact; larger
     ones require -funsafe-math-optimizations and a bounded multiply count.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 is evaluated more than once below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* Round c*3 to the nearest integer N and verify N/3 reproduces the
	 original exponent exactly in MODE before transforming.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1,narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  rtx op0, op1;
  enum machine_mode mode;
  enum machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Handle constant power.  */

  if (TREE_CODE (arg1) == INTEGER_CST
      && !TREE_OVERFLOW (arg1))
    {
      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);

      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
	 Otherwise, check the number of multiplications required.
	 The TREE_INT_CST_HIGH test ensures the constant fits in a
	 HOST_WIDE_INT (positive or sign-extended negative).  */
      if ((TREE_INT_CST_HIGH (arg1) == 0
	   || TREE_INT_CST_HIGH (arg1) == -1)
	  && ((n >= -1 && n <= 2)
	      || (optimize_insn_for_speed_p ()
		  && powi_cost (n) <= POWI_MAX_MULTS)))
	{
	  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
	  op0 = force_reg (mode, op0);
	  return expand_powi (op0, mode, n);
	}
    }

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode; try
	 successively wider integer modes until one has a strlen insn.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* Operand 2 of the strlen pattern is the terminator character, 0.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in front of the strlen insn
	 emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3228 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3229 bytes from constant string DATA + OFFSET and return it as target
3230 constant. */
3232 static rtx
3233 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3234 enum machine_mode mode)
3236 const char *str = (const char *) data;
3238 gcc_assert (offset >= 0
3239 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3240 <= strlen (str) + 1));
3242 return c_readstr (str + offset, mode);
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, dest_addr, len_rtx;
      /* Profile-driven hints; -1/0 mean "unknown".  */
      HOST_WIDE_INT expected_size = -1;
      unsigned int expected_align = 0;

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If either SRC is not a pointer type, don't do this
	 operation in-line.  */
      if (src_align == 0)
	return NULL_RTX;

      /* Pick up value-profiling hints about typical block size/alignment.  */
      if (currently_expanding_gimple_stmt)
        stringop_block_profile (currently_expanding_gimple_stmt,
				&expected_align, &expected_size);

      if (expected_align < dest_align)
	expected_align = dest_align;
      dest_mem = get_memory_rtx (dest, len);
      set_mem_align (dest_mem, dest_align);
      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      src_mem = get_memory_rtx (src, len);
      set_mem_align (src_mem, src_align);

      /* Copy word part most expediently.  */
      dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
					 CALL_EXPR_TAILCALL (exp)
					 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
					 expected_align, expected_size);

      /* memcpy returns the destination pointer.  */
      if (dest_addr == 0)
	{
	  dest_addr = force_operand (XEXP (dest_mem, 0), target);
	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
	}
      return dest_addr;
    }
}
3326 /* Expand a call EXP to the mempcpy builtin.
3327 Return NULL_RTX if we failed; the caller should emit a normal call,
3328 otherwise try to get the result in TARGET, if convenient (and in
3329 mode MODE if that's convenient). If ENDP is 0 return the
3330 destination pointer, if ENDP is 1 return the end pointer ala
3331 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3332 stpcpy. */
3334 static rtx
3335 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3337 if (!validate_arglist (exp,
3338 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3339 return NULL_RTX;
3340 else
3342 tree dest = CALL_EXPR_ARG (exp, 0);
3343 tree src = CALL_EXPR_ARG (exp, 1);
3344 tree len = CALL_EXPR_ARG (exp, 2);
3345 return expand_builtin_mempcpy_args (dest, src, len,
3346 target, mode, /*endp=*/ 1);
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, enum machine_mode mode, int endp)
{
  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
      tree result = build_call_nofold (fn, 3, dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! host_integerp (len, 1))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  /* ENDP tells store_by_pieces which pointer to hand back (start,
	     end, or end minus one).  */
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3427 #ifndef HAVE_movstr
3428 # define HAVE_movstr 0
3429 # define CODE_FOR_movstr CODE_FOR_nothing
3430 #endif
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  rtx end;
  rtx dest_mem;
  rtx src_mem;
  rtx insn;
  const struct insn_data * data;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* Caller wants the start of DEST; capture it before the insn and
	 let the end-pointer output go to a scratch register.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
      end = gen_reg_rtx (Pmode);
    }
  else
    {
      if (target == 0 || target == const0_rtx)
	{
	  end = gen_reg_rtx (Pmode);
	  if (target == 0)
	    target = end;
	}
      else
	end = target;
    }

  data = insn_data + CODE_FOR_movstr;

  /* Adapt END to the mode the movstr pattern declares for operand 0.  */
  if (data->operand[0].mode != VOIDmode)
    end = gen_lowpart (data->operand[0].mode, end);

  insn = data->genfun (end, dest_mem, src_mem);

  gcc_assert (insn);

  emit_insn (insn);

  /* movstr is supposed to set end to the address of the NUL
     terminator.  If the caller requested a mempcpy-like return value,
     adjust it.  */
  if (endp == 1 && target != const0_rtx)
    {
      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
      emit_move_insn (target, force_operand (tem, NULL_RTX));
    }

  return target;
}
3494 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3495 NULL_RTX if we failed the caller should emit a normal call, otherwise
3496 try to get the result in TARGET, if convenient (and in mode MODE if that's
3497 convenient). */
3499 static rtx
3500 expand_builtin_strcpy (tree exp, rtx target)
3502 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3504 tree dest = CALL_EXPR_ARG (exp, 0);
3505 tree src = CALL_EXPR_ARG (exp, 1);
3506 return expand_builtin_strcpy_args (dest, src, target);
3508 return NULL_RTX;
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  /* strcpy returns a pointer to the start of DEST, hence ENDP == 0.  */
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
      tree result = build_call_nofold (fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* With a known length, stpcpy is mempcpy of len+1 bytes returning
	 the end pointer minus one (ENDP == 2).  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to strcpy and compute DEST + LEN by hand.  */
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);
		  ret = plus_constant (ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3599 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3600 bytes from constant string DATA + OFFSET and return it as target
3601 constant. */
3604 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3605 enum machine_mode mode)
3607 const char *str = (const char *) data;
3609 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3610 return const0_rtx;
3612 return c_readstr (str + offset, mode);
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align
	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !host_integerp (len, 1)
	      || !can_store_by_pieces (tree_low_cst (len, 1),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  /* builtin_strncpy_read_str supplies zeros past the NUL, which
	     implements the required padding.  */
	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
3666 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3667 bytes from constant string DATA + OFFSET and return it as target
3668 constant. */
3671 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3672 enum machine_mode mode)
3674 const char *c = (const char *) data;
3675 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3677 memset (p, *c, GET_MODE_SIZE (mode));
3679 return c_readstr (p, mode);
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  /* A single byte needs no replication; DATA already is the value.  */
  if (size == 1)
    return (rtx) data;

  /* Build the constant 0x0101...01 of MODE's width.  */
  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  /* Multiplying the byte value by 0x0101...01 replicates it into
     every byte of the word.  */
  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
3708 /* Expand expression EXP, which is a call to the memset builtin. Return
3709 NULL_RTX if we failed the caller should emit a normal call, otherwise
3710 try to get the result in TARGET, if convenient (and in mode MODE if that's
3711 convenient). */
3713 static rtx
3714 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3716 if (!validate_arglist (exp,
3717 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3718 return NULL_RTX;
3719 else
3721 tree dest = CALL_EXPR_ARG (exp, 0);
3722 tree val = CALL_EXPR_ARG (exp, 1);
3723 tree len = CALL_EXPR_ARG (exp, 2);
3724 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  /* Profile-driven hints; -1/0 mean "unknown".  */
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      /* Non-constant fill value: replicate it at run time via
	 builtin_memset_gen_str, or hand off to a setmem pattern.  */
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      /* Constant non-zero byte: store replicated constants by pieces,
	 or use a setmem pattern.  */
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Emit a library call to the original function (memset or bzero),
     preserving any tail-call marking.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3857 /* Expand expression EXP, which is a call to the bzero builtin. Return
3858 NULL_RTX if we failed the caller should emit a normal call. */
3860 static rtx
3861 expand_builtin_bzero (tree exp)
3863 tree dest, size;
3864 location_t loc = EXPR_LOCATION (exp);
3866 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3867 return NULL_RTX;
3869 dest = CALL_EXPR_ARG (exp, 0);
3870 size = CALL_EXPR_ARG (exp, 1);
3872 /* New argument list transforming bzero(ptr x, int y) to
3873 memset(ptr x, int 0, size_t y). This is done this way
3874 so that if it isn't expanded inline, we fallback to
3875 calling bzero instead of memset. */
3877 return expand_builtin_memset_args (dest, integer_zero_node,
3878 fold_convert_loc (loc, sizetype, size),
3879 const0_rtx, VOIDmode, exp);
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Only attempt inline expansion if the target provides a block-compare
     (cmpmemsi) or bounded string-compare (cmpstrnsi) insn pattern.  */
#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Pick the result mode from whichever compare pattern is available,
       preferring cmpmemsi.  Note the dangling "else" chains across the
       #ifdef boundaries: if neither pattern exists at runtime we punt.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse TARGET
       only if it is already a pseudo register of the right mode.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

    /* Same preference order as the mode selection above, so INSN_MODE
       matches the pattern actually generated.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* A pattern may decline (return NULL) at expand time; fall back to
       an explicit library call to memcmp in that case.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  Needed so
	 the do_libcall fallback below does not re-evaluate side effects.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Add one for the terminating NUL so the compare covers it.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Add one for the terminating NUL so the compare covers it.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  The
	 stabilized trees are reused by the library-call fallback below.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of words
   is controlled by the definition of CUMULATIVE_ARGS.  */

static rtx
expand_builtin_args_info (tree exp)
{
  /* View the target's CUMULATIVE_ARGS record as an array of ints so
     individual words can be indexed; the assert below guarantees the
     struct size is an exact multiple of sizeof (int).  */
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  int *word_ptr = (int *) &crtl->args.info;

  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);

  if (call_expr_nargs (exp) != 0)
    {
      /* The word index must be a compile-time integer constant.  */
      if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
	error ("argument of %<__builtin_args_info%> must be constant");
      else
	{
	  HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);

	  if (wordnum < 0 || wordnum >= nwords)
	    error ("argument of %<__builtin_args_info%> out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in %<__builtin_args_info%>");

  /* On any diagnosed error, return a harmless constant zero.  */
  return const0_rtx;
}
/* Expand a call to __builtin_next_arg: compute the address of the first
   anonymous (variadic) argument as internal_arg_pointer + arg_offset.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is nonzero the caller
   will write through VALIST, so an lvalue form must be preserved.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* Callers must have verified the argument is a va_list already.  */
  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  /* Side-effect-free rvalue use: nothing to protect.  */
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Evaluate the address once, then dereference it, so later uses
	 of VALIST do not re-run its side effects.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
/* The "standard" abi va_list is va_list_type_node.  FNDECL is unused:
   the default ABI does not vary the va_list type per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4398 /* The "standard" type of va_list is va_list_type_node. */
4400 tree
4401 std_canonical_va_list_type (tree type)
4403 tree wtype, htype;
4405 if (INDIRECT_REF_P (type))
4406 type = TREE_TYPE (type);
4407 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4408 type = TREE_TYPE (type);
4409 wtype = va_list_type_node;
4410 htype = type;
4411 /* Treat structure va_list types. */
4412 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4413 htype = TREE_TYPE (htype);
4414 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4416 /* If va_list is an array type, the argument may have decayed
4417 to a pointer type, e.g. by being passed to another function.
4418 In that case, unwrap both types so that we can compare the
4419 underlying records. */
4420 if (TREE_CODE (htype) == ARRAY_TYPE
4421 || POINTER_TYPE_P (htype))
4423 wtype = TREE_TYPE (wtype);
4424 htype = TREE_TYPE (htype);
4427 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4428 return va_list_type_node;
4430 return NULL_TREE;
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is expanded as a write destination and NEXTARG
   is moved into it.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx:
   va_start produces no value, and errors are diagnosed rather than
   propagated.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse; a nonzero return means an
     error was reported and expansion should stop.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target override the standard va_start expansion.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  Emits
   setup statements into PRE_P / POST_P and returns the loaded value.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced again at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* Round valist_tmp up: add boundary-1, then mask low bits off.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* For by-reference arguments, one extra dereference fetches the
     actual value through the stored pointer.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);

  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
    mf_mark (addr);

  return addr;
}
4586 /* Return a dummy expression of type TYPE in order to keep going after an
4587 error. */
4589 static tree
4590 dummy_object (tree type)
4592 tree t = build_int_cst (build_pointer_type (type), 0);
4593 return build1 (INDIRECT_REF, type, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The "how to fix it" hint is given at most once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
/* Expand EXP, a call to __builtin_va_end.  The standard va_end is a
   no-op; only the argument's side effects (if any) must be kept.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* DST is written through (needs_lvalue), SRC is only read.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar/record va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Array va_list: copy the whole array with a block move.  */

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two.  Errors and
   unsupported arguments yield const0_rtx after a diagnostic.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, force the value into a register
	 unless it is already a register or constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if
   we failed and the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_alloca (tree exp, rtx target)
{
  rtx op0;
  rtx result;

  /* Emit normal call if marked not-inlineable.  */
  if (CALL_CANNOT_INLINE_P (exp))
    return NULL_RTX;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Allocate the desired space.  The returned stack address is in
     Pmode; convert it to ptr_mode for the pointer-valued result.  */
  result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
/* Expand a call to a bswap builtin with argument ARG0.  MODE
   is the mode to expand with.  Returns the byte-swapped value, or
   aborts if the target provides no bswap pattern/libcall.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  /* 1 == "unsignedp": byte swap has no signed variant.  */
  target = expand_unop (mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB selects the unary operation (e.g. ffs, clz, popcount).  */

static rtx
expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, 1);
  gcc_assert (target);

  /* Widen/narrow to the mode the builtin's return type requires.  */
  return convert_to_mode (target_mode, target, 0);
}
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || errorcount || sorrycount);
  return target;
}
/* Emit code that stops execution: the target's trap insn if one exists,
   otherwise a library call to abort.  A barrier follows since control
   never continues past this point.  */
void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Stabilize the argument and write it back into the call so that
     safe_from_p below inspects the same (side-effect-free) tree.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4946 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4947 Return NULL is a normal call should be emitted rather than expanding the
4948 function inline. If convenient, the result should be placed in TARGET.
4949 SUBTARGET may be used as the target for computing the operand. */
4951 static rtx
4952 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4954 rtx op0, op1;
4955 tree arg;
4957 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4958 return NULL_RTX;
4960 arg = CALL_EXPR_ARG (exp, 0);
4961 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4963 arg = CALL_EXPR_ARG (exp, 1);
4964 op1 = expand_normal (arg);
4966 return expand_copysign (op0, op1, target);
4969 /* Create a new constant string literal and return a char* pointer to it.
4970 The STRING_CST value is the LEN characters at STR. */
4971 tree
4972 build_string_literal (int len, const char *str)
4974 tree t, elem, index, type;
4976 t = build_string (len, str);
4977 elem = build_type_variant (char_type_node, 1, 0);
4978 index = build_index_type (size_int (len - 1));
4979 type = build_array_type (elem, index);
4980 TREE_TYPE (t) = type;
4981 TREE_CONSTANT (t) = 1;
4982 TREE_READONLY (t) = 1;
4983 TREE_STATIC (t) = 1;
4985 type = build_pointer_type (elem);
4986 t = build1 (ADDR_EXPR, type,
4987 build4 (ARRAY_REF, elem,
4988 t, integer_zero_node, NULL_TREE, NULL_TREE));
4989 return t;
4992 /* Expand a call to either the entry or exit function profiler. */
4994 static rtx
4995 expand_builtin_profile_func (bool exitp)
4997 rtx this_rtx, which;
4999 this_rtx = DECL_RTL (current_function_decl);
5000 gcc_assert (MEM_P (this_rtx));
5001 this_rtx = XEXP (this_rtx, 0);
5003 if (exitp)
5004 which = profile_function_exit_libfunc;
5005 else
5006 which = profile_function_entry_libfunc;
5008 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5009 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5011 Pmode);
5013 return const0_rtx;
/* Expand a call to __builtin___clear_cache.  Returns NULL_RTX when a
   normal library call should be emitted instead, const0_rtx when the
   call has been fully handled (or can be dropped).  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* Expand each pointer argument, extend it to Pmode, and force it
	 into a register if the insn's predicate rejects it as-is.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5069 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5071 static rtx
5072 round_trampoline_addr (rtx tramp)
5074 rtx temp, addend, mask;
5076 /* If we don't need too much alignment, we'll have been guaranteed
5077 proper alignment by get_trampoline_type. */
5078 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5079 return tramp;
5081 /* Round address up to desired boundary. */
5082 temp = gen_reg_rtx (Pmode);
5083 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5084 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5086 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5087 temp, 0, OPTAB_LIB_WIDEN);
5088 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5089 temp, 0, OPTAB_LIB_WIDEN);
5091 return tramp;
/* Expand a call to __builtin_init_trampoline: fill in the trampoline at
   the address given by the first argument so that calling it invokes
   the nested function (second argument) with the given static chain
   (third argument).  Returns NULL_RTX if the arguments are malformed,
   const0_rtx once the initialization insns have been emitted.  */
static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If the rounded address differs, rebuild the MEM with the aligned
     address and record the alignment/size we now know.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;
  return const0_rtx;
}
5142 static rtx
5143 expand_builtin_adjust_trampoline (tree exp)
5145 rtx tramp;
5147 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5148 return NULL_RTX;
5150 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5151 tramp = round_trampoline_addr (tramp);
5152 if (targetm.calls.trampoline_adjust_address)
5153 tramp = targetm.calls.trampoline_adjust_address (tramp);
5155 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  HOST_WIDE_INT hi, lo;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));	/* mode of the FP argument */
  rmode = TYPE_MODE (TREE_TYPE (exp));	/* mode of the integer result */
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = signbit_optab->handlers [(int) fmode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      emit_unop_insn (icode, target, temp, UNKNOWN);
      return target;
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: view it as an integer of the
	 same size, if such a mode exists.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      /* Build the double-word constant with just the sign bit set.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = (HOST_WIDE_INT) 1 << bitpos;
	}
      else
	{
	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
	  lo = 0;
	}

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_const (lo, hi, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5278 /* Expand fork or exec calls. TARGET is the desired target of the
5279 call. EXP is the call. FN is the
5280 identificator of the actual function. IGNORE is nonzero if the
5281 value is to be ignored. */
5283 static rtx
5284 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5286 tree id, decl;
5287 tree call;
5289 /* If we are not profiling, just call the function. */
5290 if (!profile_arc_flag)
5291 return NULL_RTX;
5293 /* Otherwise call the wrapper. This should be equivalent for the rest of
5294 compiler, so the code does not diverge, and the wrapper may run the
5295 code necessary for keeping the profiling sane. */
5297 switch (DECL_FUNCTION_CODE (fn))
5299 case BUILT_IN_FORK:
5300 id = get_identifier ("__gcov_fork");
5301 break;
5303 case BUILT_IN_EXECL:
5304 id = get_identifier ("__gcov_execl");
5305 break;
5307 case BUILT_IN_EXECV:
5308 id = get_identifier ("__gcov_execv");
5309 break;
5311 case BUILT_IN_EXECLP:
5312 id = get_identifier ("__gcov_execlp");
5313 break;
5315 case BUILT_IN_EXECLE:
5316 id = get_identifier ("__gcov_execle");
5317 break;
5319 case BUILT_IN_EXECVP:
5320 id = get_identifier ("__gcov_execvp");
5321 break;
5323 case BUILT_IN_EXECVE:
5324 id = get_identifier ("__gcov_execve");
5325 break;
5327 default:
5328 gcc_unreachable ();
5331 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5332 FUNCTION_DECL, id, TREE_TYPE (fn));
5333 DECL_EXTERNAL (decl) = 1;
5334 TREE_PUBLIC (decl) = 1;
5335 DECL_ARTIFICIAL (decl) = 1;
5336 TREE_NOTHROW (decl) = 1;
5337 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5338 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5339 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5340 return expand_call (call, target, ignore);
5345 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5346 the pointer in these functions is void*, the tree optimizers may remove
5347 casts. The mode computed in expand_builtin isn't reliable either, due
5348 to __sync_bool_compare_and_swap.
5350 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5351 group of builtins. This gives us log2 of the mode size. */
5353 static inline enum machine_mode
5354 get_builtin_sync_mode (int fcode_diff)
5356 /* The size is not negotiable, so ask not to get BLKmode in return
5357 if the target indicates that a smaller size would be better. */
5358 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5361 /* Expand the memory expression LOC and return the appropriate memory operand
5362 for the builtin_sync operations. */
5364 static rtx
5365 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5367 rtx addr, mem;
5369 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5370 addr = convert_memory_address (Pmode, addr);
5372 /* Note that we explicitly do not want any alias information for this
5373 memory, so that we kill all other live memories. Otherwise we don't
5374 satisfy the full barrier semantics of the intrinsic. */
5375 mem = validize_mem (gen_rtx_MEM (mode, addr));
5377 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5378 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5379 MEM_VOLATILE_P (mem) = 1;
5381 return mem;
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* __sync_fetch_and_nand / __sync_nand_and_fetch changed meaning in
     GCC 4.4; when requested, tell the user once per flavor.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* One flag per flavor so each note is emitted at most once per
	 compilation.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
5460 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5461 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5462 true if this is the boolean form. TARGET is a place for us to store the
5463 results; this is NOT optional if IS_BOOL is true. */
5465 static rtx
5466 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5467 bool is_bool, rtx target)
5469 rtx old_val, new_val, mem;
5470 enum machine_mode old_mode;
5472 /* Expand the operands. */
5473 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5476 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5477 mode, EXPAND_NORMAL);
5478 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5479 of CONST_INTs, where we know the old_mode only from the call argument. */
5480 old_mode = GET_MODE (old_val);
5481 if (old_mode == VOIDmode)
5482 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5483 old_val = convert_modes (mode, old_mode, old_val, 1);
5485 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5486 mode, EXPAND_NORMAL);
5487 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5488 of CONST_INTs, where we know the old_mode only from the call argument. */
5489 old_mode = GET_MODE (new_val);
5490 if (old_mode == VOIDmode)
5491 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5492 new_val = convert_modes (mode, old_mode, new_val, 1);
5494 if (is_bool)
5495 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5496 else
5497 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5500 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5501 general form is actually an atomic exchange, and some targets only
5502 support a reduced form with the second argument being a constant 1.
5503 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5504 the results. */
5506 static rtx
5507 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5508 rtx target)
5510 rtx val, mem;
5511 enum machine_mode old_mode;
5513 /* Expand the operands. */
5514 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5515 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5516 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5517 of CONST_INTs, where we know the old_mode only from the call argument. */
5518 old_mode = GET_MODE (val);
5519 if (old_mode == VOIDmode)
5520 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5521 val = convert_modes (mode, old_mode, val, 1);
5523 return expand_sync_lock_test_and_set (mem, val, target);
/* Expand the __sync_synchronize intrinsic.  Tries, in order: the
   target's memory_barrier insn, the synchronize libfunc, and finally a
   volatile empty asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
5558 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5560 static void
5561 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5563 enum insn_code icode;
5564 rtx mem, insn;
5565 rtx val = const0_rtx;
5567 /* Expand the operands. */
5568 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5570 /* If there is an explicit operation in the md file, use it. */
5571 icode = sync_lock_release[mode];
5572 if (icode != CODE_FOR_nothing)
5574 if (!insn_data[icode].operand[1].predicate (val, mode))
5575 val = force_reg (mode, val);
5577 insn = GEN_FCN (icode) (mem, val);
5578 if (insn)
5580 emit_insn (insn);
5581 return;
5585 /* Otherwise we can implement this operation by emitting a barrier
5586 followed by a store of zero. */
5587 expand_builtin_synchronize ();
5588 emit_move_insn (mem, val);
5591 /* Expand an expression EXP that calls a built-in function,
5592 with result going to TARGET if that's convenient
5593 (and in mode MODE if that's convenient).
5594 SUBTARGET may be used as the target for computing one of EXP's operands.
5595 IGNORE is nonzero if the value is to be ignored. */
5598 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5599 int ignore)
5601 tree fndecl = get_callee_fndecl (exp);
5602 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5603 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5605 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5606 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5608 /* When not optimizing, generate calls to library functions for a certain
5609 set of builtins. */
5610 if (!optimize
5611 && !called_as_built_in (fndecl)
5612 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5613 && fcode != BUILT_IN_ALLOCA
5614 && fcode != BUILT_IN_FREE)
5615 return expand_call (exp, target, ignore);
5617 /* The built-in function expanders test for target == const0_rtx
5618 to determine whether the function's result will be ignored. */
5619 if (ignore)
5620 target = const0_rtx;
5622 /* If the result of a pure or const built-in function is ignored, and
5623 none of its arguments are volatile, we can avoid expanding the
5624 built-in call and just evaluate the arguments for side-effects. */
5625 if (target == const0_rtx
5626 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5628 bool volatilep = false;
5629 tree arg;
5630 call_expr_arg_iterator iter;
5632 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5633 if (TREE_THIS_VOLATILE (arg))
5635 volatilep = true;
5636 break;
5639 if (! volatilep)
5641 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5642 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5643 return const0_rtx;
5647 switch (fcode)
5649 CASE_FLT_FN (BUILT_IN_FABS):
5650 target = expand_builtin_fabs (exp, target, subtarget);
5651 if (target)
5652 return target;
5653 break;
5655 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5656 target = expand_builtin_copysign (exp, target, subtarget);
5657 if (target)
5658 return target;
5659 break;
5661 /* Just do a normal library call if we were unable to fold
5662 the values. */
5663 CASE_FLT_FN (BUILT_IN_CABS):
5664 break;
5666 CASE_FLT_FN (BUILT_IN_EXP):
5667 CASE_FLT_FN (BUILT_IN_EXP10):
5668 CASE_FLT_FN (BUILT_IN_POW10):
5669 CASE_FLT_FN (BUILT_IN_EXP2):
5670 CASE_FLT_FN (BUILT_IN_EXPM1):
5671 CASE_FLT_FN (BUILT_IN_LOGB):
5672 CASE_FLT_FN (BUILT_IN_LOG):
5673 CASE_FLT_FN (BUILT_IN_LOG10):
5674 CASE_FLT_FN (BUILT_IN_LOG2):
5675 CASE_FLT_FN (BUILT_IN_LOG1P):
5676 CASE_FLT_FN (BUILT_IN_TAN):
5677 CASE_FLT_FN (BUILT_IN_ASIN):
5678 CASE_FLT_FN (BUILT_IN_ACOS):
5679 CASE_FLT_FN (BUILT_IN_ATAN):
5680 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5681 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5682 because of possible accuracy problems. */
5683 if (! flag_unsafe_math_optimizations)
5684 break;
5685 CASE_FLT_FN (BUILT_IN_SQRT):
5686 CASE_FLT_FN (BUILT_IN_FLOOR):
5687 CASE_FLT_FN (BUILT_IN_CEIL):
5688 CASE_FLT_FN (BUILT_IN_TRUNC):
5689 CASE_FLT_FN (BUILT_IN_ROUND):
5690 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5691 CASE_FLT_FN (BUILT_IN_RINT):
5692 target = expand_builtin_mathfn (exp, target, subtarget);
5693 if (target)
5694 return target;
5695 break;
5697 CASE_FLT_FN (BUILT_IN_ILOGB):
5698 if (! flag_unsafe_math_optimizations)
5699 break;
5700 CASE_FLT_FN (BUILT_IN_ISINF):
5701 CASE_FLT_FN (BUILT_IN_FINITE):
5702 case BUILT_IN_ISFINITE:
5703 case BUILT_IN_ISNORMAL:
5704 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5705 if (target)
5706 return target;
5707 break;
5709 CASE_FLT_FN (BUILT_IN_LCEIL):
5710 CASE_FLT_FN (BUILT_IN_LLCEIL):
5711 CASE_FLT_FN (BUILT_IN_LFLOOR):
5712 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5713 target = expand_builtin_int_roundingfn (exp, target);
5714 if (target)
5715 return target;
5716 break;
5718 CASE_FLT_FN (BUILT_IN_LRINT):
5719 CASE_FLT_FN (BUILT_IN_LLRINT):
5720 CASE_FLT_FN (BUILT_IN_LROUND):
5721 CASE_FLT_FN (BUILT_IN_LLROUND):
5722 target = expand_builtin_int_roundingfn_2 (exp, target);
5723 if (target)
5724 return target;
5725 break;
5727 CASE_FLT_FN (BUILT_IN_POW):
5728 target = expand_builtin_pow (exp, target, subtarget);
5729 if (target)
5730 return target;
5731 break;
5733 CASE_FLT_FN (BUILT_IN_POWI):
5734 target = expand_builtin_powi (exp, target, subtarget);
5735 if (target)
5736 return target;
5737 break;
5739 CASE_FLT_FN (BUILT_IN_ATAN2):
5740 CASE_FLT_FN (BUILT_IN_LDEXP):
5741 CASE_FLT_FN (BUILT_IN_SCALB):
5742 CASE_FLT_FN (BUILT_IN_SCALBN):
5743 CASE_FLT_FN (BUILT_IN_SCALBLN):
5744 if (! flag_unsafe_math_optimizations)
5745 break;
5747 CASE_FLT_FN (BUILT_IN_FMOD):
5748 CASE_FLT_FN (BUILT_IN_REMAINDER):
5749 CASE_FLT_FN (BUILT_IN_DREM):
5750 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5751 if (target)
5752 return target;
5753 break;
5755 CASE_FLT_FN (BUILT_IN_CEXPI):
5756 target = expand_builtin_cexpi (exp, target, subtarget);
5757 gcc_assert (target);
5758 return target;
5760 CASE_FLT_FN (BUILT_IN_SIN):
5761 CASE_FLT_FN (BUILT_IN_COS):
5762 if (! flag_unsafe_math_optimizations)
5763 break;
5764 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5765 if (target)
5766 return target;
5767 break;
5769 CASE_FLT_FN (BUILT_IN_SINCOS):
5770 if (! flag_unsafe_math_optimizations)
5771 break;
5772 target = expand_builtin_sincos (exp);
5773 if (target)
5774 return target;
5775 break;
5777 case BUILT_IN_APPLY_ARGS:
5778 return expand_builtin_apply_args ();
5780 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5781 FUNCTION with a copy of the parameters described by
5782 ARGUMENTS, and ARGSIZE. It returns a block of memory
5783 allocated on the stack into which is stored all the registers
5784 that might possibly be used for returning the result of a
5785 function. ARGUMENTS is the value returned by
5786 __builtin_apply_args. ARGSIZE is the number of bytes of
5787 arguments that must be copied. ??? How should this value be
5788 computed? We'll also need a safe worst case value for varargs
5789 functions. */
5790 case BUILT_IN_APPLY:
5791 if (!validate_arglist (exp, POINTER_TYPE,
5792 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5793 && !validate_arglist (exp, REFERENCE_TYPE,
5794 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5795 return const0_rtx;
5796 else
5798 rtx ops[3];
5800 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5801 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5802 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5804 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5807 /* __builtin_return (RESULT) causes the function to return the
5808 value described by RESULT. RESULT is address of the block of
5809 memory returned by __builtin_apply. */
5810 case BUILT_IN_RETURN:
5811 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5812 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5813 return const0_rtx;
5815 case BUILT_IN_SAVEREGS:
5816 return expand_builtin_saveregs ();
5818 case BUILT_IN_ARGS_INFO:
5819 return expand_builtin_args_info (exp);
5821 case BUILT_IN_VA_ARG_PACK:
5822 /* All valid uses of __builtin_va_arg_pack () are removed during
5823 inlining. */
5824 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5825 return const0_rtx;
5827 case BUILT_IN_VA_ARG_PACK_LEN:
5828 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5829 inlining. */
5830 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5831 return const0_rtx;
5833 /* Return the address of the first anonymous stack arg. */
5834 case BUILT_IN_NEXT_ARG:
5835 if (fold_builtin_next_arg (exp, false))
5836 return const0_rtx;
5837 return expand_builtin_next_arg ();
5839 case BUILT_IN_CLEAR_CACHE:
5840 target = expand_builtin___clear_cache (exp);
5841 if (target)
5842 return target;
5843 break;
5845 case BUILT_IN_CLASSIFY_TYPE:
5846 return expand_builtin_classify_type (exp);
5848 case BUILT_IN_CONSTANT_P:
5849 return const0_rtx;
5851 case BUILT_IN_FRAME_ADDRESS:
5852 case BUILT_IN_RETURN_ADDRESS:
5853 return expand_builtin_frame_address (fndecl, exp);
5855 /* Returns the address of the area where the structure is returned.
5856 0 otherwise. */
5857 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5858 if (call_expr_nargs (exp) != 0
5859 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5860 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5861 return const0_rtx;
5862 else
5863 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5865 case BUILT_IN_ALLOCA:
5866 target = expand_builtin_alloca (exp, target);
5867 if (target)
5868 return target;
5869 break;
5871 case BUILT_IN_STACK_SAVE:
5872 return expand_stack_save ();
5874 case BUILT_IN_STACK_RESTORE:
5875 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5876 return const0_rtx;
5878 case BUILT_IN_BSWAP32:
5879 case BUILT_IN_BSWAP64:
5880 target = expand_builtin_bswap (exp, target, subtarget);
5882 if (target)
5883 return target;
5884 break;
5886 CASE_INT_FN (BUILT_IN_FFS):
5887 case BUILT_IN_FFSIMAX:
5888 target = expand_builtin_unop (target_mode, exp, target,
5889 subtarget, ffs_optab);
5890 if (target)
5891 return target;
5892 break;
5894 CASE_INT_FN (BUILT_IN_CLZ):
5895 case BUILT_IN_CLZIMAX:
5896 target = expand_builtin_unop (target_mode, exp, target,
5897 subtarget, clz_optab);
5898 if (target)
5899 return target;
5900 break;
5902 CASE_INT_FN (BUILT_IN_CTZ):
5903 case BUILT_IN_CTZIMAX:
5904 target = expand_builtin_unop (target_mode, exp, target,
5905 subtarget, ctz_optab);
5906 if (target)
5907 return target;
5908 break;
5910 CASE_INT_FN (BUILT_IN_POPCOUNT):
5911 case BUILT_IN_POPCOUNTIMAX:
5912 target = expand_builtin_unop (target_mode, exp, target,
5913 subtarget, popcount_optab);
5914 if (target)
5915 return target;
5916 break;
5918 CASE_INT_FN (BUILT_IN_PARITY):
5919 case BUILT_IN_PARITYIMAX:
5920 target = expand_builtin_unop (target_mode, exp, target,
5921 subtarget, parity_optab);
5922 if (target)
5923 return target;
5924 break;
5926 case BUILT_IN_STRLEN:
5927 target = expand_builtin_strlen (exp, target, target_mode);
5928 if (target)
5929 return target;
5930 break;
5932 case BUILT_IN_STRCPY:
5933 target = expand_builtin_strcpy (exp, target);
5934 if (target)
5935 return target;
5936 break;
5938 case BUILT_IN_STRNCPY:
5939 target = expand_builtin_strncpy (exp, target);
5940 if (target)
5941 return target;
5942 break;
5944 case BUILT_IN_STPCPY:
5945 target = expand_builtin_stpcpy (exp, target, mode);
5946 if (target)
5947 return target;
5948 break;
5950 case BUILT_IN_MEMCPY:
5951 target = expand_builtin_memcpy (exp, target);
5952 if (target)
5953 return target;
5954 break;
5956 case BUILT_IN_MEMPCPY:
5957 target = expand_builtin_mempcpy (exp, target, mode);
5958 if (target)
5959 return target;
5960 break;
5962 case BUILT_IN_MEMSET:
5963 target = expand_builtin_memset (exp, target, mode);
5964 if (target)
5965 return target;
5966 break;
5968 case BUILT_IN_BZERO:
5969 target = expand_builtin_bzero (exp);
5970 if (target)
5971 return target;
5972 break;
5974 case BUILT_IN_STRCMP:
5975 target = expand_builtin_strcmp (exp, target);
5976 if (target)
5977 return target;
5978 break;
5980 case BUILT_IN_STRNCMP:
5981 target = expand_builtin_strncmp (exp, target, mode);
5982 if (target)
5983 return target;
5984 break;
5986 case BUILT_IN_BCMP:
5987 case BUILT_IN_MEMCMP:
5988 target = expand_builtin_memcmp (exp, target, mode);
5989 if (target)
5990 return target;
5991 break;
5993 case BUILT_IN_SETJMP:
5994 /* This should have been lowered to the builtins below. */
5995 gcc_unreachable ();
5997 case BUILT_IN_SETJMP_SETUP:
5998 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5999 and the receiver label. */
6000 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6002 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6003 VOIDmode, EXPAND_NORMAL);
6004 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6005 rtx label_r = label_rtx (label);
6007 /* This is copied from the handling of non-local gotos. */
6008 expand_builtin_setjmp_setup (buf_addr, label_r);
6009 nonlocal_goto_handler_labels
6010 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6011 nonlocal_goto_handler_labels);
6012 /* ??? Do not let expand_label treat us as such since we would
6013 not want to be both on the list of non-local labels and on
6014 the list of forced labels. */
6015 FORCED_LABEL (label) = 0;
6016 return const0_rtx;
6018 break;
6020 case BUILT_IN_SETJMP_DISPATCHER:
6021 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6022 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6024 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6025 rtx label_r = label_rtx (label);
6027 /* Remove the dispatcher label from the list of non-local labels
6028 since the receiver labels have been added to it above. */
6029 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6030 return const0_rtx;
6032 break;
6034 case BUILT_IN_SETJMP_RECEIVER:
6035 /* __builtin_setjmp_receiver is passed the receiver label. */
6036 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6038 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6039 rtx label_r = label_rtx (label);
6041 expand_builtin_setjmp_receiver (label_r);
6042 return const0_rtx;
6044 break;
6046 /* __builtin_longjmp is passed a pointer to an array of five words.
6047 It's similar to the C library longjmp function but works with
6048 __builtin_setjmp above. */
6049 case BUILT_IN_LONGJMP:
6050 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6052 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6053 VOIDmode, EXPAND_NORMAL);
6054 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6056 if (value != const1_rtx)
6058 error ("%<__builtin_longjmp%> second argument must be 1");
6059 return const0_rtx;
6062 expand_builtin_longjmp (buf_addr, value);
6063 return const0_rtx;
6065 break;
6067 case BUILT_IN_NONLOCAL_GOTO:
6068 target = expand_builtin_nonlocal_goto (exp);
6069 if (target)
6070 return target;
6071 break;
6073 /* This updates the setjmp buffer that is its argument with the value
6074 of the current stack pointer. */
6075 case BUILT_IN_UPDATE_SETJMP_BUF:
6076 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6078 rtx buf_addr
6079 = expand_normal (CALL_EXPR_ARG (exp, 0));
6081 expand_builtin_update_setjmp_buf (buf_addr);
6082 return const0_rtx;
6084 break;
6086 case BUILT_IN_TRAP:
6087 expand_builtin_trap ();
6088 return const0_rtx;
6090 case BUILT_IN_UNREACHABLE:
6091 expand_builtin_unreachable ();
6092 return const0_rtx;
6094 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6095 case BUILT_IN_SIGNBITD32:
6096 case BUILT_IN_SIGNBITD64:
6097 case BUILT_IN_SIGNBITD128:
6098 target = expand_builtin_signbit (exp, target);
6099 if (target)
6100 return target;
6101 break;
6103 /* Various hooks for the DWARF 2 __throw routine. */
6104 case BUILT_IN_UNWIND_INIT:
6105 expand_builtin_unwind_init ();
6106 return const0_rtx;
6107 case BUILT_IN_DWARF_CFA:
6108 return virtual_cfa_rtx;
6109 #ifdef DWARF2_UNWIND_INFO
6110 case BUILT_IN_DWARF_SP_COLUMN:
6111 return expand_builtin_dwarf_sp_column ();
6112 case BUILT_IN_INIT_DWARF_REG_SIZES:
6113 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6114 return const0_rtx;
6115 #endif
6116 case BUILT_IN_FROB_RETURN_ADDR:
6117 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6118 case BUILT_IN_EXTRACT_RETURN_ADDR:
6119 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6120 case BUILT_IN_EH_RETURN:
6121 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6122 CALL_EXPR_ARG (exp, 1));
6123 return const0_rtx;
6124 #ifdef EH_RETURN_DATA_REGNO
6125 case BUILT_IN_EH_RETURN_DATA_REGNO:
6126 return expand_builtin_eh_return_data_regno (exp);
6127 #endif
6128 case BUILT_IN_EXTEND_POINTER:
6129 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6130 case BUILT_IN_EH_POINTER:
6131 return expand_builtin_eh_pointer (exp);
6132 case BUILT_IN_EH_FILTER:
6133 return expand_builtin_eh_filter (exp);
6134 case BUILT_IN_EH_COPY_VALUES:
6135 return expand_builtin_eh_copy_values (exp);
6137 case BUILT_IN_VA_START:
6138 return expand_builtin_va_start (exp);
6139 case BUILT_IN_VA_END:
6140 return expand_builtin_va_end (exp);
6141 case BUILT_IN_VA_COPY:
6142 return expand_builtin_va_copy (exp);
6143 case BUILT_IN_EXPECT:
6144 return expand_builtin_expect (exp, target);
6145 case BUILT_IN_PREFETCH:
6146 expand_builtin_prefetch (exp);
6147 return const0_rtx;
6149 case BUILT_IN_PROFILE_FUNC_ENTER:
6150 return expand_builtin_profile_func (false);
6151 case BUILT_IN_PROFILE_FUNC_EXIT:
6152 return expand_builtin_profile_func (true);
6154 case BUILT_IN_INIT_TRAMPOLINE:
6155 return expand_builtin_init_trampoline (exp);
6156 case BUILT_IN_ADJUST_TRAMPOLINE:
6157 return expand_builtin_adjust_trampoline (exp);
6159 case BUILT_IN_FORK:
6160 case BUILT_IN_EXECL:
6161 case BUILT_IN_EXECV:
6162 case BUILT_IN_EXECLP:
6163 case BUILT_IN_EXECLE:
6164 case BUILT_IN_EXECVP:
6165 case BUILT_IN_EXECVE:
6166 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6167 if (target)
6168 return target;
6169 break;
6171 case BUILT_IN_FETCH_AND_ADD_1:
6172 case BUILT_IN_FETCH_AND_ADD_2:
6173 case BUILT_IN_FETCH_AND_ADD_4:
6174 case BUILT_IN_FETCH_AND_ADD_8:
6175 case BUILT_IN_FETCH_AND_ADD_16:
6176 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6177 target = expand_builtin_sync_operation (mode, exp, PLUS,
6178 false, target, ignore);
6179 if (target)
6180 return target;
6181 break;
6183 case BUILT_IN_FETCH_AND_SUB_1:
6184 case BUILT_IN_FETCH_AND_SUB_2:
6185 case BUILT_IN_FETCH_AND_SUB_4:
6186 case BUILT_IN_FETCH_AND_SUB_8:
6187 case BUILT_IN_FETCH_AND_SUB_16:
6188 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6189 target = expand_builtin_sync_operation (mode, exp, MINUS,
6190 false, target, ignore);
6191 if (target)
6192 return target;
6193 break;
6195 case BUILT_IN_FETCH_AND_OR_1:
6196 case BUILT_IN_FETCH_AND_OR_2:
6197 case BUILT_IN_FETCH_AND_OR_4:
6198 case BUILT_IN_FETCH_AND_OR_8:
6199 case BUILT_IN_FETCH_AND_OR_16:
6200 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6201 target = expand_builtin_sync_operation (mode, exp, IOR,
6202 false, target, ignore);
6203 if (target)
6204 return target;
6205 break;
6207 case BUILT_IN_FETCH_AND_AND_1:
6208 case BUILT_IN_FETCH_AND_AND_2:
6209 case BUILT_IN_FETCH_AND_AND_4:
6210 case BUILT_IN_FETCH_AND_AND_8:
6211 case BUILT_IN_FETCH_AND_AND_16:
6212 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6213 target = expand_builtin_sync_operation (mode, exp, AND,
6214 false, target, ignore);
6215 if (target)
6216 return target;
6217 break;
6219 case BUILT_IN_FETCH_AND_XOR_1:
6220 case BUILT_IN_FETCH_AND_XOR_2:
6221 case BUILT_IN_FETCH_AND_XOR_4:
6222 case BUILT_IN_FETCH_AND_XOR_8:
6223 case BUILT_IN_FETCH_AND_XOR_16:
6224 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6225 target = expand_builtin_sync_operation (mode, exp, XOR,
6226 false, target, ignore);
6227 if (target)
6228 return target;
6229 break;
6231 case BUILT_IN_FETCH_AND_NAND_1:
6232 case BUILT_IN_FETCH_AND_NAND_2:
6233 case BUILT_IN_FETCH_AND_NAND_4:
6234 case BUILT_IN_FETCH_AND_NAND_8:
6235 case BUILT_IN_FETCH_AND_NAND_16:
6236 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6237 target = expand_builtin_sync_operation (mode, exp, NOT,
6238 false, target, ignore);
6239 if (target)
6240 return target;
6241 break;
6243 case BUILT_IN_ADD_AND_FETCH_1:
6244 case BUILT_IN_ADD_AND_FETCH_2:
6245 case BUILT_IN_ADD_AND_FETCH_4:
6246 case BUILT_IN_ADD_AND_FETCH_8:
6247 case BUILT_IN_ADD_AND_FETCH_16:
6248 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6249 target = expand_builtin_sync_operation (mode, exp, PLUS,
6250 true, target, ignore);
6251 if (target)
6252 return target;
6253 break;
6255 case BUILT_IN_SUB_AND_FETCH_1:
6256 case BUILT_IN_SUB_AND_FETCH_2:
6257 case BUILT_IN_SUB_AND_FETCH_4:
6258 case BUILT_IN_SUB_AND_FETCH_8:
6259 case BUILT_IN_SUB_AND_FETCH_16:
6260 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6261 target = expand_builtin_sync_operation (mode, exp, MINUS,
6262 true, target, ignore);
6263 if (target)
6264 return target;
6265 break;
6267 case BUILT_IN_OR_AND_FETCH_1:
6268 case BUILT_IN_OR_AND_FETCH_2:
6269 case BUILT_IN_OR_AND_FETCH_4:
6270 case BUILT_IN_OR_AND_FETCH_8:
6271 case BUILT_IN_OR_AND_FETCH_16:
6272 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6273 target = expand_builtin_sync_operation (mode, exp, IOR,
6274 true, target, ignore);
6275 if (target)
6276 return target;
6277 break;
6279 case BUILT_IN_AND_AND_FETCH_1:
6280 case BUILT_IN_AND_AND_FETCH_2:
6281 case BUILT_IN_AND_AND_FETCH_4:
6282 case BUILT_IN_AND_AND_FETCH_8:
6283 case BUILT_IN_AND_AND_FETCH_16:
6284 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6285 target = expand_builtin_sync_operation (mode, exp, AND,
6286 true, target, ignore);
6287 if (target)
6288 return target;
6289 break;
6291 case BUILT_IN_XOR_AND_FETCH_1:
6292 case BUILT_IN_XOR_AND_FETCH_2:
6293 case BUILT_IN_XOR_AND_FETCH_4:
6294 case BUILT_IN_XOR_AND_FETCH_8:
6295 case BUILT_IN_XOR_AND_FETCH_16:
6296 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6297 target = expand_builtin_sync_operation (mode, exp, XOR,
6298 true, target, ignore);
6299 if (target)
6300 return target;
6301 break;
6303 case BUILT_IN_NAND_AND_FETCH_1:
6304 case BUILT_IN_NAND_AND_FETCH_2:
6305 case BUILT_IN_NAND_AND_FETCH_4:
6306 case BUILT_IN_NAND_AND_FETCH_8:
6307 case BUILT_IN_NAND_AND_FETCH_16:
6308 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6309 target = expand_builtin_sync_operation (mode, exp, NOT,
6310 true, target, ignore);
6311 if (target)
6312 return target;
6313 break;
6315 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6316 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6317 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6318 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6319 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6320 if (mode == VOIDmode)
6321 mode = TYPE_MODE (boolean_type_node);
6322 if (!target || !register_operand (target, mode))
6323 target = gen_reg_rtx (mode);
6325 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6326 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6327 if (target)
6328 return target;
6329 break;
6331 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6332 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6333 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6334 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6335 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6336 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6337 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6338 if (target)
6339 return target;
6340 break;
6342 case BUILT_IN_LOCK_TEST_AND_SET_1:
6343 case BUILT_IN_LOCK_TEST_AND_SET_2:
6344 case BUILT_IN_LOCK_TEST_AND_SET_4:
6345 case BUILT_IN_LOCK_TEST_AND_SET_8:
6346 case BUILT_IN_LOCK_TEST_AND_SET_16:
6347 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6348 target = expand_builtin_lock_test_and_set (mode, exp, target);
6349 if (target)
6350 return target;
6351 break;
6353 case BUILT_IN_LOCK_RELEASE_1:
6354 case BUILT_IN_LOCK_RELEASE_2:
6355 case BUILT_IN_LOCK_RELEASE_4:
6356 case BUILT_IN_LOCK_RELEASE_8:
6357 case BUILT_IN_LOCK_RELEASE_16:
6358 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6359 expand_builtin_lock_release (mode, exp);
6360 return const0_rtx;
6362 case BUILT_IN_SYNCHRONIZE:
6363 expand_builtin_synchronize ();
6364 return const0_rtx;
6366 case BUILT_IN_OBJECT_SIZE:
6367 return expand_builtin_object_size (exp);
6369 case BUILT_IN_MEMCPY_CHK:
6370 case BUILT_IN_MEMPCPY_CHK:
6371 case BUILT_IN_MEMMOVE_CHK:
6372 case BUILT_IN_MEMSET_CHK:
6373 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6374 if (target)
6375 return target;
6376 break;
6378 case BUILT_IN_STRCPY_CHK:
6379 case BUILT_IN_STPCPY_CHK:
6380 case BUILT_IN_STRNCPY_CHK:
6381 case BUILT_IN_STRCAT_CHK:
6382 case BUILT_IN_STRNCAT_CHK:
6383 case BUILT_IN_SNPRINTF_CHK:
6384 case BUILT_IN_VSNPRINTF_CHK:
6385 maybe_emit_chk_warning (exp, fcode);
6386 break;
6388 case BUILT_IN_SPRINTF_CHK:
6389 case BUILT_IN_VSPRINTF_CHK:
6390 maybe_emit_sprintf_chk_warning (exp, fcode);
6391 break;
6393 case BUILT_IN_FREE:
6394 maybe_emit_free_warning (exp);
6395 break;
6397 default: /* just do library call, if unknown builtin */
6398 break;
6401 /* The switch statement above can drop through to cause the function
6402 to be called normally. */
6403 return expand_call (exp, target, ignore);
6406 /* Determine whether a tree node represents a call to a built-in
6407 function. If the tree T is a call to a built-in function with
6408 the right number of arguments of the appropriate types, return
6409 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6410 Otherwise the return value is END_BUILTINS. */
6412 enum built_in_function
6413 builtin_mathfn_code (const_tree t)
6415 const_tree fndecl, arg, parmlist;
6416 const_tree argtype, parmtype;
6417 const_call_expr_arg_iterator iter;
/* Only a direct call whose callee is wrapped in an ADDR_EXPR can be
   recognized as a builtin call.  */
6419 if (TREE_CODE (t) != CALL_EXPR
6420 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6421 return END_BUILTINS;
6423 fndecl = get_callee_fndecl (t);
/* Machine-specific (BUILT_IN_MD) builtins are excluded; only normal
   builtins carry the function codes callers of this routine expect.  */
6424 if (fndecl == NULL_TREE
6425 || TREE_CODE (fndecl) != FUNCTION_DECL
6426 || ! DECL_BUILT_IN (fndecl)
6427 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6428 return END_BUILTINS;
/* Walk the declared parameter types and the actual call arguments in
   lockstep, requiring each argument's type class (float, complex,
   pointer, integral) to match the corresponding parameter's class.  */
6430 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6431 init_const_call_expr_arg_iterator (t, &iter);
6432 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6434 /* If a function doesn't take a variable number of arguments,
6435 the last element in the list will have type `void'. */
6436 parmtype = TREE_VALUE (parmlist);
6437 if (VOID_TYPE_P (parmtype))
/* End of the declared parameters: any leftover actual argument means
   the call passes too many arguments.  */
6439 if (more_const_call_expr_args_p (&iter))
6440 return END_BUILTINS;
6441 return DECL_FUNCTION_CODE (fndecl);
/* Too few actual arguments for the declared parameter list.  */
6444 if (! more_const_call_expr_args_p (&iter))
6445 return END_BUILTINS;
6447 arg = next_const_call_expr_arg (&iter);
6448 argtype = TREE_TYPE (arg);
6450 if (SCALAR_FLOAT_TYPE_P (parmtype))
6452 if (! SCALAR_FLOAT_TYPE_P (argtype))
6453 return END_BUILTINS;
6455 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6457 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6458 return END_BUILTINS;
6460 else if (POINTER_TYPE_P (parmtype))
6462 if (! POINTER_TYPE_P (argtype))
6463 return END_BUILTINS;
6465 else if (INTEGRAL_TYPE_P (parmtype))
6467 if (! INTEGRAL_TYPE_P (argtype))
6468 return END_BUILTINS;
6470 else
6471 return END_BUILTINS;
6474 /* Variable-length argument list. */
6475 return DECL_FUNCTION_CODE (fndecl);
6478 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6479 evaluate to a constant. */
6481 static tree
6482 fold_builtin_constant_p (tree arg)
6484 /* We return 1 for a numeric type that's known to be a constant
6485 value at compile-time or for an aggregate type that's a
6486 literal constant. */
6487 STRIP_NOPS (arg);
6489 /* If we know this is a constant, emit the constant of one. */
6490 if (CONSTANT_CLASS_P (arg)
6491 || (TREE_CODE (arg) == CONSTRUCTOR
6492 && TREE_CONSTANT (arg)))
6493 return integer_one_node;
/* The address of a string literal, or of its first element taken via a
   zero-index ARRAY_REF, is also a compile-time constant.  */
6494 if (TREE_CODE (arg) == ADDR_EXPR)
6496 tree op = TREE_OPERAND (arg, 0);
6497 if (TREE_CODE (op) == STRING_CST
6498 || (TREE_CODE (op) == ARRAY_REF
6499 && integer_zerop (TREE_OPERAND (op, 1))
6500 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6501 return integer_one_node;
6504 /* If this expression has side effects, show we don't know it to be a
6505 constant. Likewise if it's a pointer or aggregate type since in
6506 those case we only want literals, since those are only optimized
6507 when generating RTL, not later.
6508 And finally, if we are compiling an initializer, not code, we
6509 need to return a definite result now; there's not going to be any
6510 more optimization done. */
6511 if (TREE_SIDE_EFFECTS (arg)
6512 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6513 || POINTER_TYPE_P (TREE_TYPE (arg))
6514 || cfun == 0
6515 || folding_initializer)
6516 return integer_zero_node;
/* Still unknown: leave the call in place so a later pass, with more
   optimization context, can try again.  */
6518 return NULL_TREE;
6521 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6522 return it as a truthvalue. */
6524 static tree
6525 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6527 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6529 fn = built_in_decls[BUILT_IN_EXPECT];
6530 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6531 ret_type = TREE_TYPE (TREE_TYPE (fn));
6532 pred_type = TREE_VALUE (arg_types);
6533 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6535 pred = fold_convert_loc (loc, pred_type, pred);
6536 expected = fold_convert_loc (loc, expected_type, expected);
6537 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6539 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6540 build_int_cst (ret_type, 0));
6543 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6544 NULL_TREE if no simplification is possible. */
6546 static tree
6547 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6549 tree inner, fndecl;
6550 enum tree_code code;
6552 /* If this is a builtin_expect within a builtin_expect keep the
6553 inner one. See through a comparison against a constant. It
6554 might have been added to create a thruthvalue. */
6555 inner = arg0;
6556 if (COMPARISON_CLASS_P (inner)
6557 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6558 inner = TREE_OPERAND (inner, 0);
6560 if (TREE_CODE (inner) == CALL_EXPR
6561 && (fndecl = get_callee_fndecl (inner))
6562 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6563 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6564 return arg0;
6566 /* Distribute the expected value over short-circuiting operators.
6567 See through the cast from truthvalue_type_node to long. */
6568 inner = arg0;
6569 while (TREE_CODE (inner) == NOP_EXPR
6570 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6571 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6572 inner = TREE_OPERAND (inner, 0);
6574 code = TREE_CODE (inner);
6575 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6577 tree op0 = TREE_OPERAND (inner, 0);
6578 tree op1 = TREE_OPERAND (inner, 1);
/* Rewrite (a && b) into (expect(a) && expect(b)) so the prediction
   reaches each operand of the short-circuit separately.  */
6580 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6581 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6582 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6584 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6587 /* If the argument isn't invariant then there's nothing else we can do. */
6588 if (!TREE_CONSTANT (arg0))
6589 return NULL_TREE;
6591 /* If we expect that a comparison against the argument will fold to
6592 a constant return the constant. In practice, this means a true
6593 constant or the address of a non-weak symbol. */
6594 inner = arg0;
6595 STRIP_NOPS (inner);
6596 if (TREE_CODE (inner) == ADDR_EXPR)
/* Strip COMPONENT_REF/ARRAY_REF wrappers down to the underlying decl;
   the address of a weak symbol is not a usable compile-time constant.  */
6600 inner = TREE_OPERAND (inner, 0);
6602 while (TREE_CODE (inner) == COMPONENT_REF
6603 || TREE_CODE (inner) == ARRAY_REF);
6604 if ((TREE_CODE (inner) == VAR_DECL
6605 || TREE_CODE (inner) == FUNCTION_DECL)
6606 && DECL_WEAK (inner))
6607 return NULL_TREE;
6610 /* Otherwise, ARG0 already has the proper type for the return value. */
6611 return arg0;
6614 /* Fold a call to __builtin_classify_type with argument ARG. */
6616 static tree
6617 fold_builtin_classify_type (tree arg)
6619 if (arg == 0)
6620 return build_int_cst (NULL_TREE, no_type_class);
6622 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6625 /* Fold a call to __builtin_strlen with argument ARG. */
6627 static tree
6628 fold_builtin_strlen (location_t loc, tree type, tree arg)
6630 if (!validate_arg (arg, POINTER_TYPE))
6631 return NULL_TREE;
6632 else
6634 tree len = c_strlen (arg, 0);
6636 if (len)
6637 return fold_convert_loc (loc, type, len);
6639 return NULL_TREE;
6643 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6645 static tree
6646 fold_builtin_inf (location_t loc, tree type, int warn)
6648 REAL_VALUE_TYPE real;
6650 /* __builtin_inff is intended to be usable to define INFINITY on all
6651 targets. If an infinity is not available, INFINITY expands "to a
6652 positive constant of type float that overflows at translation
6653 time", footnote "In this case, using INFINITY will violate the
6654 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6655 Thus we pedwarn to ensure this constraint violation is
6656 diagnosed. */
6657 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6658 pedwarn (loc, 0, "target format does not support infinity");
6660 real_inf (&real);
6661 return build_real (type, real);
6664 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6666 static tree
6667 fold_builtin_nan (tree arg, tree type, int quiet)
6669 REAL_VALUE_TYPE real;
6670 const char *str;
6672 if (!validate_arg (arg, POINTER_TYPE))
6673 return NULL_TREE;
6674 str = c_getstr (arg);
6675 if (!str)
6676 return NULL_TREE;
6678 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6679 return NULL_TREE;
6681 return build_real (type, real);
6684 /* Return true if the floating point expression T has an integer value.
6685 We also allow +Inf, -Inf and NaN to be considered integer values. */
6687 static bool
6688 integer_valued_real_p (tree t)
6690 switch (TREE_CODE (t))
6692 case FLOAT_EXPR:
/* A conversion from an integer is integer valued by construction.  */
6693 return true;
6695 case ABS_EXPR:
6696 case SAVE_EXPR:
6697 return integer_valued_real_p (TREE_OPERAND (t, 0));
6699 case COMPOUND_EXPR:
6700 case MODIFY_EXPR:
6701 case BIND_EXPR:
/* For these nodes only the value operand (operand 1) matters.  */
6702 return integer_valued_real_p (TREE_OPERAND (t, 1));
6704 case PLUS_EXPR:
6705 case MINUS_EXPR:
6706 case MULT_EXPR:
6707 case MIN_EXPR:
6708 case MAX_EXPR:
6709 return integer_valued_real_p (TREE_OPERAND (t, 0))
6710 && integer_valued_real_p (TREE_OPERAND (t, 1));
6712 case COND_EXPR:
/* Both arms of the conditional must be integer valued.  */
6713 return integer_valued_real_p (TREE_OPERAND (t, 1))
6714 && integer_valued_real_p (TREE_OPERAND (t, 2));
6716 case REAL_CST:
6717 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6719 case NOP_EXPR:
6721 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6722 if (TREE_CODE (type) == INTEGER_TYPE)
6723 return true;
6724 if (TREE_CODE (type) == REAL_TYPE)
6725 return integer_valued_real_p (TREE_OPERAND (t, 0));
6726 break;
6729 case CALL_EXPR:
/* Rounding builtins always produce integer values; fmin/fmax do so
   iff both of their operands are integer valued.  */
6730 switch (builtin_mathfn_code (t))
6732 CASE_FLT_FN (BUILT_IN_CEIL):
6733 CASE_FLT_FN (BUILT_IN_FLOOR):
6734 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6735 CASE_FLT_FN (BUILT_IN_RINT):
6736 CASE_FLT_FN (BUILT_IN_ROUND):
6737 CASE_FLT_FN (BUILT_IN_TRUNC):
6738 return true;
6740 CASE_FLT_FN (BUILT_IN_FMIN):
6741 CASE_FLT_FN (BUILT_IN_FMAX):
6742 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6743 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6745 default:
6746 break;
6748 break;
6750 default:
6751 break;
/* Conservative default: anything unrecognized may be fractional.  */
6753 return false;
6756 /* FNDECL is assumed to be a builtin where truncation can be propagated
6757 across (for instance floor((double)f) == (double)floorf (f).
6758 Do the transformation for a call with argument ARG. */
6760 static tree
6761 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6763 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6765 if (!validate_arg (arg, REAL_TYPE))
6766 return NULL_TREE;
6768 /* Integer rounding functions are idempotent. */
6769 if (fcode == builtin_mathfn_code (arg))
6770 return arg;
6772 /* If argument is already integer valued, and we don't need to worry
6773 about setting errno, there's no need to perform rounding. */
6774 if (! flag_errno_math && integer_valued_real_p (arg))
6775 return arg;
6777 if (optimize)
6779 tree arg0 = strip_float_extensions (arg);
6780 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6781 tree newtype = TREE_TYPE (arg0);
6782 tree decl;
/* If ARG was widened from a narrower float type and the builtin also
   exists for that narrower type, do the rounding there and widen the
   result instead -- e.g. floor((double)f) -> (double)floorf(f).  */
6784 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6785 && (decl = mathfn_built_in (newtype, fcode)))
6786 return fold_convert_loc (loc, ftype,
6787 build_call_expr_loc (loc, decl, 1,
6788 fold_convert_loc (loc,
6789 newtype,
6790 arg0)));
6792 return NULL_TREE;
6795 /* FNDECL is assumed to be builtin which can narrow the FP type of
6796 the argument, for instance lround((double)f) -> lroundf (f).
6797 Do the transformation for a call with argument ARG. */
6799 static tree
6800 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6802 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6804 if (!validate_arg (arg, REAL_TYPE))
6805 return NULL_TREE;
6807 /* If argument is already integer valued, and we don't need to worry
6808 about setting errno, there's no need to perform rounding. */
6809 if (! flag_errno_math && integer_valued_real_p (arg))
6810 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6811 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6813 if (optimize)
6815 tree ftype = TREE_TYPE (arg);
6816 tree arg0 = strip_float_extensions (arg);
6817 tree newtype = TREE_TYPE (arg0);
6818 tree decl;
/* If ARG was widened from a narrower float type and a variant of this
   builtin exists for that narrower type, call the narrower variant
   directly -- the integer result type is unchanged.  */
6820 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6821 && (decl = mathfn_built_in (newtype, fcode)))
6822 return build_call_expr_loc (loc, decl, 1,
6823 fold_convert_loc (loc, newtype, arg0));
6826 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6827 sizeof (long long) == sizeof (long). */
6828 if (TYPE_PRECISION (long_long_integer_type_node)
6829 == TYPE_PRECISION (long_integer_type_node))
6831 tree newfn = NULL_TREE;
6832 switch (fcode)
6834 CASE_FLT_FN (BUILT_IN_LLCEIL):
6835 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6836 break;
6838 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6839 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6840 break;
6842 CASE_FLT_FN (BUILT_IN_LLROUND):
6843 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6844 break;
6846 CASE_FLT_FN (BUILT_IN_LLRINT):
6847 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6848 break;
6850 default:
6851 break;
/* Wrap the long-returning call in a conversion back to the original
   long long return type so the interface seen by callers is kept.  */
6854 if (newfn)
6856 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6857 return fold_convert_loc (loc,
6858 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6862 return NULL_TREE;
6865 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6866 return type. Return NULL_TREE if no simplification can be made. */
6868 static tree
6869 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6871 tree res;
6873 if (!validate_arg (arg, COMPLEX_TYPE)
6874 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6875 return NULL_TREE;
6877 /* Calculate the result when the argument is a constant. */
6878 if (TREE_CODE (arg) == COMPLEX_CST
6879 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6880 type, mpfr_hypot)))
6881 return res;
6883 if (TREE_CODE (arg) == COMPLEX_EXPR)
6885 tree real = TREE_OPERAND (arg, 0);
6886 tree imag = TREE_OPERAND (arg, 1);
6888 /* If either part is zero, cabs is fabs of the other. */
6889 if (real_zerop (real))
6890 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6891 if (real_zerop (imag))
6892 return fold_build1_loc (loc, ABS_EXPR, type, real);
6894 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6895 if (flag_unsafe_math_optimizations
6896 && operand_equal_p (real, imag, OEP_PURE_SAME))
6898 const REAL_VALUE_TYPE sqrt2_trunc
6899 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6900 STRIP_NOPS (real);
6901 return fold_build2_loc (loc, MULT_EXPR, type,
6902 fold_build1_loc (loc, ABS_EXPR, type, real),
6903 build_real (type, sqrt2_trunc));
6907 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6908 if (TREE_CODE (arg) == NEGATE_EXPR
6909 || TREE_CODE (arg) == CONJ_EXPR)
6910 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6912 /* Don't do this when optimizing for size. */
6913 if (flag_unsafe_math_optimizations
6914 && optimize && optimize_function_for_speed_p (cfun))
6916 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6918 if (sqrtfn != NULL_TREE)
6920 tree rpart, ipart, result;
/* Save ARG and both parts so each is evaluated only once in the
   expansion sqrt (r*r + i*i).  */
6922 arg = builtin_save_expr (arg);
6924 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6925 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6927 rpart = builtin_save_expr (rpart);
6928 ipart = builtin_save_expr (ipart);
6930 result = fold_build2_loc (loc, PLUS_EXPR, type,
6931 fold_build2_loc (loc, MULT_EXPR, type,
6932 rpart, rpart),
6933 fold_build2_loc (loc, MULT_EXPR, type,
6934 ipart, ipart));
6936 return build_call_expr_loc (loc, sqrtfn, 1, result);
6940 return NULL_TREE;
6943 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6944 Return NULL_TREE if no simplification can be made. */
6946 static tree
6947 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6950 enum built_in_function fcode;
6951 tree res;
6953 if (!validate_arg (arg, REAL_TYPE))
6954 return NULL_TREE;
6956 /* Calculate the result when the argument is a constant. */
6957 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6958 return res;
6960 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6961 fcode = builtin_mathfn_code (arg);
6962 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6964 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6965 arg = fold_build2_loc (loc, MULT_EXPR, type,
6966 CALL_EXPR_ARG (arg, 0),
6967 build_real (type, dconsthalf));
6968 return build_call_expr_loc (loc, expfn, 1, arg);
6971 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6972 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6974 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6976 if (powfn)
6978 tree arg0 = CALL_EXPR_ARG (arg, 0);
6979 tree tree_root;
6980 /* The inner root was either sqrt or cbrt. */
6981 /* This was a conditional expression but it triggered a bug
6982 in Sun C 5.5. */
6983 REAL_VALUE_TYPE dconstroot;
6984 if (BUILTIN_SQRT_P (fcode))
6985 dconstroot = dconsthalf;
6986 else
6987 dconstroot = dconst_third ();
6989 /* Adjust for the outer root. */
/* Halve the exponent by decrementing the binary exponent of the
   inner root's constant: 1/2 -> 1/4, 1/3 -> 1/6.  */
6990 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6991 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6992 tree_root = build_real (type, dconstroot);
6993 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6997 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6998 if (flag_unsafe_math_optimizations
6999 && (fcode == BUILT_IN_POW
7000 || fcode == BUILT_IN_POWF
7001 || fcode == BUILT_IN_POWL))
7003 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7004 tree arg0 = CALL_EXPR_ARG (arg, 0);
7005 tree arg1 = CALL_EXPR_ARG (arg, 1);
7006 tree narg1;
/* Take |x| unless x is provably nonnegative, since pow of a negative
   base with fractional exponent would otherwise be invalid.  */
7007 if (!tree_expr_nonnegative_p (arg0))
7008 arg0 = build1 (ABS_EXPR, type, arg0);
7009 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7010 build_real (type, dconsthalf));
7011 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7014 return NULL_TREE;
7017 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7018 Return NULL_TREE if no simplification can be made. */
7020 static tree
7021 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7023 const enum built_in_function fcode = builtin_mathfn_code (arg);
7024 tree res;
7026 if (!validate_arg (arg, REAL_TYPE))
7027 return NULL_TREE;
7029 /* Calculate the result when the argument is a constant. */
7030 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7031 return res;
7033 if (flag_unsafe_math_optimizations)
7035 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7036 if (BUILTIN_EXPONENT_P (fcode))
7038 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7039 const REAL_VALUE_TYPE third_trunc =
7040 real_value_truncate (TYPE_MODE (type), dconst_third ());
7041 arg = fold_build2_loc (loc, MULT_EXPR, type,
7042 CALL_EXPR_ARG (arg, 0),
7043 build_real (type, third_trunc));
7044 return build_call_expr_loc (loc, expfn, 1, arg);
7047 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7048 if (BUILTIN_SQRT_P (fcode))
7050 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7052 if (powfn)
7054 tree arg0 = CALL_EXPR_ARG (arg, 0);
7055 tree tree_root;
7056 REAL_VALUE_TYPE dconstroot = dconst_third ();
/* Halve 1/3 to 1/6 by decrementing the binary exponent.  */
7058 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7059 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7060 tree_root = build_real (type, dconstroot);
7061 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7065 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7066 if (BUILTIN_CBRT_P (fcode))
7068 tree arg0 = CALL_EXPR_ARG (arg, 0);
7069 if (tree_expr_nonnegative_p (arg0))
7071 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7073 if (powfn)
7075 tree tree_root;
7076 REAL_VALUE_TYPE dconstroot;
/* 1/9 is computed as (1/3) * (1/3) since it has no exact short form.  */
7078 real_arithmetic (&dconstroot, MULT_EXPR,
7079 dconst_third_ptr (), dconst_third_ptr ());
7080 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7081 tree_root = build_real (type, dconstroot);
7082 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7087 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7088 if (fcode == BUILT_IN_POW
7089 || fcode == BUILT_IN_POWF
7090 || fcode == BUILT_IN_POWL)
7092 tree arg00 = CALL_EXPR_ARG (arg, 0);
7093 tree arg01 = CALL_EXPR_ARG (arg, 1);
7094 if (tree_expr_nonnegative_p (arg00))
7096 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7097 const REAL_VALUE_TYPE dconstroot
7098 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7099 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7100 build_real (type, dconstroot));
7101 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7105 return NULL_TREE;
7108 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7109 TYPE is the type of the return value. Return NULL_TREE if no
7110 simplification can be made. */
7112 static tree
7113 fold_builtin_cos (location_t loc,
7114 tree arg, tree type, tree fndecl)
7116 tree res, narg;
7118 if (!validate_arg (arg, REAL_TYPE))
7119 return NULL_TREE;
7121 /* Calculate the result when the argument is a constant. */
7122 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7123 return res;
7125 /* Optimize cos(-x) into cos (x). */
7126 if ((narg = fold_strip_sign_ops (arg)))
7127 return build_call_expr_loc (loc, fndecl, 1, narg);
7129 return NULL_TREE;
7132 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7133 Return NULL_TREE if no simplification can be made. */
7135 static tree
7136 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7138 if (validate_arg (arg, REAL_TYPE))
7140 tree res, narg;
7142 /* Calculate the result when the argument is a constant. */
7143 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7144 return res;
7146 /* Optimize cosh(-x) into cosh (x). */
7147 if ((narg = fold_strip_sign_ops (arg)))
7148 return build_call_expr_loc (loc, fndecl, 1, narg);
7151 return NULL_TREE;
7154 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7155 argument ARG. TYPE is the type of the return value. Return
7156 NULL_TREE if no simplification can be made. */
7158 static tree
7159 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7160 bool hyper)
7162 if (validate_arg (arg, COMPLEX_TYPE)
7163 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7165 tree tmp;
7167 /* Calculate the result when the argument is a constant. */
7168 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7169 return tmp;
7171 /* Optimize fn(-x) into fn(x). */
7172 if ((tmp = fold_strip_sign_ops (arg)))
7173 return build_call_expr_loc (loc, fndecl, 1, tmp);
7176 return NULL_TREE;
7179 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7180 Return NULL_TREE if no simplification can be made. */
7182 static tree
7183 fold_builtin_tan (tree arg, tree type)
7185 enum built_in_function fcode;
7186 tree res;
7188 if (!validate_arg (arg, REAL_TYPE))
7189 return NULL_TREE;
7191 /* Calculate the result when the argument is a constant. */
7192 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7193 return res;
7195 /* Optimize tan(atan(x)) = x. */
7196 fcode = builtin_mathfn_code (arg);
7197 if (flag_unsafe_math_optimizations
7198 && (fcode == BUILT_IN_ATAN
7199 || fcode == BUILT_IN_ATANF
7200 || fcode == BUILT_IN_ATANL))
7201 return CALL_EXPR_ARG (arg, 0);
7203 return NULL_TREE;
7206 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7207 NULL_TREE if no simplification can be made. */
7209 static tree
7210 fold_builtin_sincos (location_t loc,
7211 tree arg0, tree arg1, tree arg2)
7213 tree type;
7214 tree res, fn, call;
7216 if (!validate_arg (arg0, REAL_TYPE)
7217 || !validate_arg (arg1, POINTER_TYPE)
7218 || !validate_arg (arg2, POINTER_TYPE))
7219 return NULL_TREE;
7221 type = TREE_TYPE (arg0);
7223 /* Calculate the result when the argument is a constant. */
7224 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7225 return res;
7227 /* Canonicalize sincos to cexpi. */
7228 if (!TARGET_C99_FUNCTIONS)
7229 return NULL_TREE;
7230 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7231 if (!fn)
7232 return NULL_TREE;
7234 call = build_call_expr_loc (loc, fn, 1, arg0);
7235 call = builtin_save_expr (call);
7237 return build2 (COMPOUND_EXPR, void_type_node,
7238 build2 (MODIFY_EXPR, void_type_node,
7239 build_fold_indirect_ref_loc (loc, arg1),
7240 build1 (IMAGPART_EXPR, type, call)),
7241 build2 (MODIFY_EXPR, void_type_node,
7242 build_fold_indirect_ref_loc (loc, arg2),
7243 build1 (REALPART_EXPR, type, call)));
7246 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7247 NULL_TREE if no simplification can be made. */
7249 static tree
7250 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7252 tree rtype;
7253 tree realp, imagp, ifn;
7254 tree res;
7256 if (!validate_arg (arg0, COMPLEX_TYPE)
7257 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7258 return NULL_TREE;
7260 /* Calculate the result when the argument is a constant. */
7261 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7262 return res;
7264 rtype = TREE_TYPE (TREE_TYPE (arg0));
7266 /* In case we can figure out the real part of arg0 and it is constant zero
7267 fold to cexpi. */
7268 if (!TARGET_C99_FUNCTIONS)
7269 return NULL_TREE;
7270 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7271 if (!ifn)
7272 return NULL_TREE;
7274 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7275 && real_zerop (realp))
7277 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7278 return build_call_expr_loc (loc, ifn, 1, narg);
7281 /* In case we can easily decompose real and imaginary parts split cexp
7282 to exp (r) * cexpi (i). */
7283 if (flag_unsafe_math_optimizations
7284 && realp)
7286 tree rfn, rcall, icall;
7288 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7289 if (!rfn)
7290 return NULL_TREE;
7292 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7293 if (!imagp)
7294 return NULL_TREE;
7296 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7297 icall = builtin_save_expr (icall);
7298 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7299 rcall = builtin_save_expr (rcall);
7300 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7301 fold_build2_loc (loc, MULT_EXPR, rtype,
7302 rcall,
7303 fold_build1_loc (loc, REALPART_EXPR,
7304 rtype, icall)),
7305 fold_build2_loc (loc, MULT_EXPR, rtype,
7306 rcall,
7307 fold_build1_loc (loc, IMAGPART_EXPR,
7308 rtype, icall)));
7311 return NULL_TREE;
7314 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7315 Return NULL_TREE if no simplification can be made. */
7317 static tree
7318 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7320 if (!validate_arg (arg, REAL_TYPE))
7321 return NULL_TREE;
7323 /* Optimize trunc of constant value. */
7324 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7326 REAL_VALUE_TYPE r, x;
7327 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7329 x = TREE_REAL_CST (arg);
7330 real_trunc (&r, TYPE_MODE (type), &x);
7331 return build_real (type, r);
7334 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7337 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7338 Return NULL_TREE if no simplification can be made. */
7340 static tree
7341 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7343 if (!validate_arg (arg, REAL_TYPE))
7344 return NULL_TREE;
7346 /* Optimize floor of constant value. */
7347 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7349 REAL_VALUE_TYPE x;
7351 x = TREE_REAL_CST (arg);
7352 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7354 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7355 REAL_VALUE_TYPE r;
7357 real_floor (&r, TYPE_MODE (type), &x);
7358 return build_real (type, r);
7362 /* Fold floor (x) where x is nonnegative to trunc (x). */
7363 if (tree_expr_nonnegative_p (arg))
7365 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7366 if (truncfn)
7367 return build_call_expr_loc (loc, truncfn, 1, arg);
7370 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7373 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7374 Return NULL_TREE if no simplification can be made. */
7376 static tree
7377 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7379 if (!validate_arg (arg, REAL_TYPE))
7380 return NULL_TREE;
7382 /* Optimize ceil of constant value. */
7383 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7385 REAL_VALUE_TYPE x;
7387 x = TREE_REAL_CST (arg);
7388 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7390 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7391 REAL_VALUE_TYPE r;
7393 real_ceil (&r, TYPE_MODE (type), &x);
7394 return build_real (type, r);
7398 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7401 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7402 Return NULL_TREE if no simplification can be made. */
7404 static tree
7405 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7407 if (!validate_arg (arg, REAL_TYPE))
7408 return NULL_TREE;
7410 /* Optimize round of constant value. */
7411 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7413 REAL_VALUE_TYPE x;
7415 x = TREE_REAL_CST (arg);
7416 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7418 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7419 REAL_VALUE_TYPE r;
7421 real_round (&r, TYPE_MODE (type), &x);
7422 return build_real (type, r);
7426 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7429 /* Fold function call to builtin lround, lroundf or lroundl (or the
7430 corresponding long long versions) and other rounding functions. ARG
7431 is the argument to the call. Return NULL_TREE if no simplification
7432 can be made. */
7434 static tree
7435 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7437 if (!validate_arg (arg, REAL_TYPE))
7438 return NULL_TREE;
7440 /* Optimize lround of constant value. */
7441 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7443 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7445 if (real_isfinite (&x))
7447 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7448 tree ftype = TREE_TYPE (arg);
7449 unsigned HOST_WIDE_INT lo2;
7450 HOST_WIDE_INT hi, lo;
7451 REAL_VALUE_TYPE r;
7453 switch (DECL_FUNCTION_CODE (fndecl))
7455 CASE_FLT_FN (BUILT_IN_LFLOOR):
7456 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7457 real_floor (&r, TYPE_MODE (ftype), &x);
7458 break;
7460 CASE_FLT_FN (BUILT_IN_LCEIL):
7461 CASE_FLT_FN (BUILT_IN_LLCEIL):
7462 real_ceil (&r, TYPE_MODE (ftype), &x);
7463 break;
7465 CASE_FLT_FN (BUILT_IN_LROUND):
7466 CASE_FLT_FN (BUILT_IN_LLROUND):
7467 real_round (&r, TYPE_MODE (ftype), &x);
7468 break;
7470 default:
7471 gcc_unreachable ();
7474 REAL_VALUE_TO_INT (&lo, &hi, r);
7475 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7476 return build_int_cst_wide (itype, lo2, hi);
7480 switch (DECL_FUNCTION_CODE (fndecl))
7482 CASE_FLT_FN (BUILT_IN_LFLOOR):
7483 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7484 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7485 if (tree_expr_nonnegative_p (arg))
7486 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7487 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7488 break;
7489 default:;
7492 return fold_fixed_mathfn (loc, fndecl, arg);
7495 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7496 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7497 the argument to the call. Return NULL_TREE if no simplification can
7498 be made. */
7500 static tree
7501 fold_builtin_bitop (tree fndecl, tree arg)
7503 if (!validate_arg (arg, INTEGER_TYPE))
7504 return NULL_TREE;
7506 /* Optimize for constant argument. */
7507 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7509 HOST_WIDE_INT hi, width, result;
7510 unsigned HOST_WIDE_INT lo;
7511 tree type;
7513 type = TREE_TYPE (arg);
7514 width = TYPE_PRECISION (type);
7515 lo = TREE_INT_CST_LOW (arg);
7517 /* Clear all the bits that are beyond the type's precision. */
7518 if (width > HOST_BITS_PER_WIDE_INT)
7520 hi = TREE_INT_CST_HIGH (arg);
7521 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7522 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7524 else
7526 hi = 0;
7527 if (width < HOST_BITS_PER_WIDE_INT)
7528 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7531 switch (DECL_FUNCTION_CODE (fndecl))
7533 CASE_INT_FN (BUILT_IN_FFS):
7534 if (lo != 0)
7535 result = exact_log2 (lo & -lo) + 1;
7536 else if (hi != 0)
7537 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7538 else
7539 result = 0;
7540 break;
7542 CASE_INT_FN (BUILT_IN_CLZ):
7543 if (hi != 0)
7544 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7545 else if (lo != 0)
7546 result = width - floor_log2 (lo) - 1;
7547 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7548 result = width;
7549 break;
7551 CASE_INT_FN (BUILT_IN_CTZ):
7552 if (lo != 0)
7553 result = exact_log2 (lo & -lo);
7554 else if (hi != 0)
7555 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7556 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7557 result = width;
7558 break;
7560 CASE_INT_FN (BUILT_IN_POPCOUNT):
7561 result = 0;
7562 while (lo)
7563 result++, lo &= lo - 1;
7564 while (hi)
7565 result++, hi &= hi - 1;
7566 break;
7568 CASE_INT_FN (BUILT_IN_PARITY):
7569 result = 0;
7570 while (lo)
7571 result++, lo &= lo - 1;
7572 while (hi)
7573 result++, hi &= hi - 1;
7574 result &= 1;
7575 break;
7577 default:
7578 gcc_unreachable ();
7581 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7584 return NULL_TREE;
7587 /* Fold function call to builtin_bswap and the long and long long
7588 variants. Return NULL_TREE if no simplification can be made. */
7589 static tree
7590 fold_builtin_bswap (tree fndecl, tree arg)
7592 if (! validate_arg (arg, INTEGER_TYPE))
7593 return NULL_TREE;
7595 /* Optimize constant value. */
7596 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7598 HOST_WIDE_INT hi, width, r_hi = 0;
7599 unsigned HOST_WIDE_INT lo, r_lo = 0;
7600 tree type;
7602 type = TREE_TYPE (arg);
7603 width = TYPE_PRECISION (type);
7604 lo = TREE_INT_CST_LOW (arg);
7605 hi = TREE_INT_CST_HIGH (arg);
7607 switch (DECL_FUNCTION_CODE (fndecl))
7609 case BUILT_IN_BSWAP32:
7610 case BUILT_IN_BSWAP64:
7612 int s;
7614 for (s = 0; s < width; s += 8)
7616 int d = width - s - 8;
7617 unsigned HOST_WIDE_INT byte;
7619 if (s < HOST_BITS_PER_WIDE_INT)
7620 byte = (lo >> s) & 0xff;
7621 else
7622 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7624 if (d < HOST_BITS_PER_WIDE_INT)
7625 r_lo |= byte << d;
7626 else
7627 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7631 break;
7633 default:
7634 gcc_unreachable ();
7637 if (width < HOST_BITS_PER_WIDE_INT)
7638 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7639 else
7640 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7643 return NULL_TREE;
7646 /* A subroutine of fold_builtin to fold the various logarithmic
7647 functions. Return NULL_TREE if no simplification can me made.
7648 FUNC is the corresponding MPFR logarithm function. */
7650 static tree
7651 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7652 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7654 if (validate_arg (arg, REAL_TYPE))
7656 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7657 tree res;
7658 const enum built_in_function fcode = builtin_mathfn_code (arg);
7660 /* Calculate the result when the argument is a constant. */
7661 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7662 return res;
7664 /* Special case, optimize logN(expN(x)) = x. */
7665 if (flag_unsafe_math_optimizations
7666 && ((func == mpfr_log
7667 && (fcode == BUILT_IN_EXP
7668 || fcode == BUILT_IN_EXPF
7669 || fcode == BUILT_IN_EXPL))
7670 || (func == mpfr_log2
7671 && (fcode == BUILT_IN_EXP2
7672 || fcode == BUILT_IN_EXP2F
7673 || fcode == BUILT_IN_EXP2L))
7674 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7675 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7677 /* Optimize logN(func()) for various exponential functions. We
7678 want to determine the value "x" and the power "exponent" in
7679 order to transform logN(x**exponent) into exponent*logN(x). */
7680 if (flag_unsafe_math_optimizations)
7682 tree exponent = 0, x = 0;
7684 switch (fcode)
7686 CASE_FLT_FN (BUILT_IN_EXP):
7687 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7688 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7689 dconst_e ()));
7690 exponent = CALL_EXPR_ARG (arg, 0);
7691 break;
7692 CASE_FLT_FN (BUILT_IN_EXP2):
7693 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7694 x = build_real (type, dconst2);
7695 exponent = CALL_EXPR_ARG (arg, 0);
7696 break;
7697 CASE_FLT_FN (BUILT_IN_EXP10):
7698 CASE_FLT_FN (BUILT_IN_POW10):
7699 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7701 REAL_VALUE_TYPE dconst10;
7702 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7703 x = build_real (type, dconst10);
7705 exponent = CALL_EXPR_ARG (arg, 0);
7706 break;
7707 CASE_FLT_FN (BUILT_IN_SQRT):
7708 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7709 x = CALL_EXPR_ARG (arg, 0);
7710 exponent = build_real (type, dconsthalf);
7711 break;
7712 CASE_FLT_FN (BUILT_IN_CBRT):
7713 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7714 x = CALL_EXPR_ARG (arg, 0);
7715 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7716 dconst_third ()));
7717 break;
7718 CASE_FLT_FN (BUILT_IN_POW):
7719 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7720 x = CALL_EXPR_ARG (arg, 0);
7721 exponent = CALL_EXPR_ARG (arg, 1);
7722 break;
7723 default:
7724 break;
7727 /* Now perform the optimization. */
7728 if (x && exponent)
7730 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7731 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7736 return NULL_TREE;
7739 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7740 NULL_TREE if no simplification can be made. */
7742 static tree
7743 fold_builtin_hypot (location_t loc, tree fndecl,
7744 tree arg0, tree arg1, tree type)
7746 tree res, narg0, narg1;
7748 if (!validate_arg (arg0, REAL_TYPE)
7749 || !validate_arg (arg1, REAL_TYPE))
7750 return NULL_TREE;
7752 /* Calculate the result when the argument is a constant. */
7753 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7754 return res;
7756 /* If either argument to hypot has a negate or abs, strip that off.
7757 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7758 narg0 = fold_strip_sign_ops (arg0);
7759 narg1 = fold_strip_sign_ops (arg1);
7760 if (narg0 || narg1)
7762 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7763 narg1 ? narg1 : arg1);
7766 /* If either argument is zero, hypot is fabs of the other. */
7767 if (real_zerop (arg0))
7768 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7769 else if (real_zerop (arg1))
7770 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7772 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7773 if (flag_unsafe_math_optimizations
7774 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7776 const REAL_VALUE_TYPE sqrt2_trunc
7777 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7778 return fold_build2_loc (loc, MULT_EXPR, type,
7779 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7780 build_real (type, sqrt2_trunc));
7783 return NULL_TREE;
7787 /* Fold a builtin function call to pow, powf, or powl. Return
7788 NULL_TREE if no simplification can be made. */
7789 static tree
7790 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7792 tree res;
7794 if (!validate_arg (arg0, REAL_TYPE)
7795 || !validate_arg (arg1, REAL_TYPE))
7796 return NULL_TREE;
7798 /* Calculate the result when the argument is a constant. */
7799 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7800 return res;
7802 /* Optimize pow(1.0,y) = 1.0. */
7803 if (real_onep (arg0))
7804 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7806 if (TREE_CODE (arg1) == REAL_CST
7807 && !TREE_OVERFLOW (arg1))
7809 REAL_VALUE_TYPE cint;
7810 REAL_VALUE_TYPE c;
7811 HOST_WIDE_INT n;
7813 c = TREE_REAL_CST (arg1);
7815 /* Optimize pow(x,0.0) = 1.0. */
7816 if (REAL_VALUES_EQUAL (c, dconst0))
7817 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7818 arg0);
7820 /* Optimize pow(x,1.0) = x. */
7821 if (REAL_VALUES_EQUAL (c, dconst1))
7822 return arg0;
7824 /* Optimize pow(x,-1.0) = 1.0/x. */
7825 if (REAL_VALUES_EQUAL (c, dconstm1))
7826 return fold_build2_loc (loc, RDIV_EXPR, type,
7827 build_real (type, dconst1), arg0);
7829 /* Optimize pow(x,0.5) = sqrt(x). */
7830 if (flag_unsafe_math_optimizations
7831 && REAL_VALUES_EQUAL (c, dconsthalf))
7833 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7835 if (sqrtfn != NULL_TREE)
7836 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7839 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7840 if (flag_unsafe_math_optimizations)
7842 const REAL_VALUE_TYPE dconstroot
7843 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7845 if (REAL_VALUES_EQUAL (c, dconstroot))
7847 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7848 if (cbrtfn != NULL_TREE)
7849 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7853 /* Check for an integer exponent. */
7854 n = real_to_integer (&c);
7855 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7856 if (real_identical (&c, &cint))
7858 /* Attempt to evaluate pow at compile-time, unless this should
7859 raise an exception. */
7860 if (TREE_CODE (arg0) == REAL_CST
7861 && !TREE_OVERFLOW (arg0)
7862 && (n > 0
7863 || (!flag_trapping_math && !flag_errno_math)
7864 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7866 REAL_VALUE_TYPE x;
7867 bool inexact;
7869 x = TREE_REAL_CST (arg0);
7870 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7871 if (flag_unsafe_math_optimizations || !inexact)
7872 return build_real (type, x);
7875 /* Strip sign ops from even integer powers. */
7876 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7878 tree narg0 = fold_strip_sign_ops (arg0);
7879 if (narg0)
7880 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7885 if (flag_unsafe_math_optimizations)
7887 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7889 /* Optimize pow(expN(x),y) = expN(x*y). */
7890 if (BUILTIN_EXPONENT_P (fcode))
7892 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7893 tree arg = CALL_EXPR_ARG (arg0, 0);
7894 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7895 return build_call_expr_loc (loc, expfn, 1, arg);
7898 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7899 if (BUILTIN_SQRT_P (fcode))
7901 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7902 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7903 build_real (type, dconsthalf));
7904 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7907 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7908 if (BUILTIN_CBRT_P (fcode))
7910 tree arg = CALL_EXPR_ARG (arg0, 0);
7911 if (tree_expr_nonnegative_p (arg))
7913 const REAL_VALUE_TYPE dconstroot
7914 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7915 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7916 build_real (type, dconstroot));
7917 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7921 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7922 if (fcode == BUILT_IN_POW
7923 || fcode == BUILT_IN_POWF
7924 || fcode == BUILT_IN_POWL)
7926 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7927 if (tree_expr_nonnegative_p (arg00))
7929 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7930 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7931 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7936 return NULL_TREE;
7939 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7940 Return NULL_TREE if no simplification can be made. */
7941 static tree
7942 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7943 tree arg0, tree arg1, tree type)
7945 if (!validate_arg (arg0, REAL_TYPE)
7946 || !validate_arg (arg1, INTEGER_TYPE))
7947 return NULL_TREE;
7949 /* Optimize pow(1.0,y) = 1.0. */
7950 if (real_onep (arg0))
7951 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7953 if (host_integerp (arg1, 0))
7955 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7957 /* Evaluate powi at compile-time. */
7958 if (TREE_CODE (arg0) == REAL_CST
7959 && !TREE_OVERFLOW (arg0))
7961 REAL_VALUE_TYPE x;
7962 x = TREE_REAL_CST (arg0);
7963 real_powi (&x, TYPE_MODE (type), &x, c);
7964 return build_real (type, x);
7967 /* Optimize pow(x,0) = 1.0. */
7968 if (c == 0)
7969 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7970 arg0);
7972 /* Optimize pow(x,1) = x. */
7973 if (c == 1)
7974 return arg0;
7976 /* Optimize pow(x,-1) = 1.0/x. */
7977 if (c == -1)
7978 return fold_build2_loc (loc, RDIV_EXPR, type,
7979 build_real (type, dconst1), arg0);
7982 return NULL_TREE;
7985 /* A subroutine of fold_builtin to fold the various exponent
7986 functions. Return NULL_TREE if no simplification can be made.
7987 FUNC is the corresponding MPFR exponent function. */
7989 static tree
7990 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7991 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7993 if (validate_arg (arg, REAL_TYPE))
7995 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7996 tree res;
7998 /* Calculate the result when the argument is a constant. */
7999 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8000 return res;
8002 /* Optimize expN(logN(x)) = x. */
8003 if (flag_unsafe_math_optimizations)
8005 const enum built_in_function fcode = builtin_mathfn_code (arg);
8007 if ((func == mpfr_exp
8008 && (fcode == BUILT_IN_LOG
8009 || fcode == BUILT_IN_LOGF
8010 || fcode == BUILT_IN_LOGL))
8011 || (func == mpfr_exp2
8012 && (fcode == BUILT_IN_LOG2
8013 || fcode == BUILT_IN_LOG2F
8014 || fcode == BUILT_IN_LOG2L))
8015 || (func == mpfr_exp10
8016 && (fcode == BUILT_IN_LOG10
8017 || fcode == BUILT_IN_LOG10F
8018 || fcode == BUILT_IN_LOG10L)))
8019 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8023 return NULL_TREE;
8026 /* Return true if VAR is a VAR_DECL or a component thereof. */
8028 static bool
8029 var_decl_component_p (tree var)
8031 tree inner = var;
8032 while (handled_component_p (inner))
8033 inner = TREE_OPERAND (inner, 0);
8034 return SSA_VAR_P (inner);
8037 /* Fold function call to builtin memset. Return
8038 NULL_TREE if no simplification can be made. */
8040 static tree
8041 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8042 tree type, bool ignore)
8044 tree var, ret, etype;
8045 unsigned HOST_WIDE_INT length, cval;
8047 if (! validate_arg (dest, POINTER_TYPE)
8048 || ! validate_arg (c, INTEGER_TYPE)
8049 || ! validate_arg (len, INTEGER_TYPE))
8050 return NULL_TREE;
8052 if (! host_integerp (len, 1))
8053 return NULL_TREE;
8055 /* If the LEN parameter is zero, return DEST. */
8056 if (integer_zerop (len))
8057 return omit_one_operand_loc (loc, type, dest, c);
8059 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8060 return NULL_TREE;
8062 var = dest;
8063 STRIP_NOPS (var);
8064 if (TREE_CODE (var) != ADDR_EXPR)
8065 return NULL_TREE;
8067 var = TREE_OPERAND (var, 0);
8068 if (TREE_THIS_VOLATILE (var))
8069 return NULL_TREE;
8071 etype = TREE_TYPE (var);
8072 if (TREE_CODE (etype) == ARRAY_TYPE)
8073 etype = TREE_TYPE (etype);
8075 if (!INTEGRAL_TYPE_P (etype)
8076 && !POINTER_TYPE_P (etype))
8077 return NULL_TREE;
8079 if (! var_decl_component_p (var))
8080 return NULL_TREE;
8082 length = tree_low_cst (len, 1);
8083 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8084 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8085 < (int) length)
8086 return NULL_TREE;
8088 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8089 return NULL_TREE;
8091 if (integer_zerop (c))
8092 cval = 0;
8093 else
8095 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8096 return NULL_TREE;
8098 cval = tree_low_cst (c, 1);
8099 cval &= 0xff;
8100 cval |= cval << 8;
8101 cval |= cval << 16;
8102 cval |= (cval << 31) << 1;
8105 ret = build_int_cst_type (etype, cval);
8106 var = build_fold_indirect_ref_loc (loc,
8107 fold_convert_loc (loc,
8108 build_pointer_type (etype),
8109 dest));
8110 ret = build2 (MODIFY_EXPR, etype, var, ret);
8111 if (ignore)
8112 return ret;
8114 return omit_one_operand_loc (loc, type, dest, ret);
8117 /* Fold function call to builtin memset. Return
8118 NULL_TREE if no simplification can be made. */
8120 static tree
8121 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8123 if (! validate_arg (dest, POINTER_TYPE)
8124 || ! validate_arg (size, INTEGER_TYPE))
8125 return NULL_TREE;
8127 if (!ignore)
8128 return NULL_TREE;
8130 /* New argument list transforming bzero(ptr x, int y) to
8131 memset(ptr x, int 0, size_t y). This is done this way
8132 so that if it isn't expanded inline, we fallback to
8133 calling bzero instead of memset. */
8135 return fold_builtin_memset (loc, dest, integer_zero_node,
8136 fold_convert_loc (loc, sizetype, size),
8137 void_type_node, ignore);
8140 /* Fold function call to builtin mem{{,p}cpy,move}.  Return
8141    NULL_TREE if no simplification can be made.
8142    If ENDP is 0, return DEST (like memcpy).
8143    If ENDP is 1, return DEST+LEN (like mempcpy).
8144    If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8145    If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8146    (memmove).  */
8148 static tree
8149 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8150 			tree len, tree type, bool ignore, int endp)
8152   tree destvar, srcvar, expr;
8154   if (! validate_arg (dest, POINTER_TYPE)
8155       || ! validate_arg (src, POINTER_TYPE)
8156       || ! validate_arg (len, INTEGER_TYPE))
8157     return NULL_TREE;
8159   /* If the LEN parameter is zero, return DEST.  */
8160   if (integer_zerop (len))
8161     return omit_one_operand_loc (loc, type, dest, src);
8163   /* If SRC and DEST are the same (and not volatile), return
8164      DEST{,+LEN,+LEN-1}.  */
8165   if (operand_equal_p (src, dest, 0))
8166     expr = len;
8167   else
8169       tree srctype, desttype;
8170       int src_align, dest_align;
8172       if (endp == 3)
       /* memmove case: it may only be turned into memcpy when we can
	  prove *SRC and *DEST cannot overlap.  */
8174 	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8175 	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8177 	  /* Both DEST and SRC must be pointer types.
8178 	     ??? This is what old code did.  Is the testing for pointer types
8179 	     really mandatory?
8181 	     If either SRC is readonly or length is 1, we can use memcpy.  */
8182 	  if (!dest_align || !src_align)
8183 	    return NULL_TREE;
8184 	  if (readonly_data_expr (src)
8185 	      || (host_integerp (len, 1)
8186 		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
8187 		      >= tree_low_cst (len, 1))))
8189 	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8190 	      if (!fn)
8191 		return NULL_TREE;
8192               return build_call_expr_loc (loc, fn, 3, dest, src, len);
8195 	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
8196 	  srcvar = build_fold_indirect_ref_loc (loc, src);
8197 	  destvar = build_fold_indirect_ref_loc (loc, dest);
8198 	  if (srcvar
8199 	      && !TREE_THIS_VOLATILE (srcvar)
8200 	      && destvar
8201 	      && !TREE_THIS_VOLATILE (destvar))
8203 	      tree src_base, dest_base, fn;
8204 	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8205 	      HOST_WIDE_INT size = -1;
8206 	      HOST_WIDE_INT maxsize = -1;
8208 	      src_base = srcvar;
8209 	      if (handled_component_p (src_base))
8210 		src_base = get_ref_base_and_extent (src_base, &src_offset,
8211 						    &size, &maxsize);
8212 	      dest_base = destvar;
8213 	      if (handled_component_p (dest_base))
8214 		dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8215 						     &size, &maxsize);
8216 	      if (host_integerp (len, 1))
	       /* A constant LEN bounds the extent of both accesses; convert
		  it to bits, guarding against HOST_WIDE_INT overflow.  */
8218 		  maxsize = tree_low_cst (len, 1);
8219 		  if (maxsize
8220 		      > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8221 		    maxsize = -1;
8222 		  else
8223 		    maxsize *= BITS_PER_UNIT;
8225 	      else
8226 		maxsize = -1;
	      /* For two declarations, overlap is only possible when both are
		 the same decl and the accessed byte ranges intersect.  */
8227 	      if (SSA_VAR_P (src_base)
8228 		  && SSA_VAR_P (dest_base))
8230 		  if (operand_equal_p (src_base, dest_base, 0)
8231 		      && ranges_overlap_p (src_offset, maxsize,
8232 					   dest_offset, maxsize))
8233 		    return NULL_TREE;
	      /* For two indirections, require provably different base
		 pointers (or same base with disjoint ranges).  */
8235 	      else if (TREE_CODE (src_base) == INDIRECT_REF
8236 		       && TREE_CODE (dest_base) == INDIRECT_REF)
8238 		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8239 					 TREE_OPERAND (dest_base, 0), 0)
8240 		      || ranges_overlap_p (src_offset, maxsize,
8241 					   dest_offset, maxsize))
8242 		    return NULL_TREE;
8244 	      else
8245 		return NULL_TREE;
8247 	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8248 	      if (!fn)
8249 		return NULL_TREE;
8250 	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
8252 	  return NULL_TREE;
       /* Non-memmove cases: try to lower the copy to a single scalar
	  assignment *DEST = *SRC when LEN equals the size of the
	  pointed-to type.  */
8255       if (!host_integerp (len, 0))
8256 	return NULL_TREE;
8257       /* FIXME:
8258          This logic lose for arguments like (type *)malloc (sizeof (type)),
8259          since we strip the casts of up to VOID return value from malloc.
8260 	 Perhaps we ought to inherit type from non-VOID argument here?  */
8261       STRIP_NOPS (src);
8262       STRIP_NOPS (dest);
8263       /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
8264       if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8266 	  tree tem = TREE_OPERAND (src, 0);
8267 	  STRIP_NOPS (tem);
8268 	  if (tem != TREE_OPERAND (src, 0))
8269 	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8271       if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8273 	  tree tem = TREE_OPERAND (dest, 0);
8274 	  STRIP_NOPS (tem);
8275 	  if (tem != TREE_OPERAND (dest, 0))
8276 	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
      /* If SRC points to an array whose total size does not match LEN,
	 retry with a pointer to the array's element type.  */
8278       srctype = TREE_TYPE (TREE_TYPE (src));
8279       if (srctype
8280 	  && TREE_CODE (srctype) == ARRAY_TYPE
8281 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8283 	  srctype = TREE_TYPE (srctype);
8284 	  STRIP_NOPS (src);
8285 	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8287       desttype = TREE_TYPE (TREE_TYPE (dest));
8288       if (desttype
8289 	  && TREE_CODE (desttype) == ARRAY_TYPE
8290 	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8292 	  desttype = TREE_TYPE (desttype);
8293 	  STRIP_NOPS (dest);
8294 	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
      /* Both pointed-to types must have a constant size and must not be
	 volatile for the scalar-assignment lowering to be valid.  */
8296       if (!srctype || !desttype
8297 	  || !TYPE_SIZE_UNIT (srctype)
8298 	  || !TYPE_SIZE_UNIT (desttype)
8299 	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8300 	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8301 	  || TYPE_VOLATILE (srctype)
8302 	  || TYPE_VOLATILE (desttype))
8303 	return NULL_TREE;
8305       src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8306       dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8307       if (dest_align < (int) TYPE_ALIGN (desttype)
8308 	  || src_align < (int) TYPE_ALIGN (srctype))
8309 	return NULL_TREE;
8311       if (!ignore)
8312         dest = builtin_save_expr (dest);
8314       srcvar = NULL_TREE;
8315       if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8317 	  srcvar = build_fold_indirect_ref_loc (loc, src);
8318 	  if (TREE_THIS_VOLATILE (srcvar))
8319 	    return NULL_TREE;
8320 	  else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8321 	    srcvar = NULL_TREE;
8322 	  /* With memcpy, it is possible to bypass aliasing rules, so without
8323 	     this check i.e. execute/20060930-2.c would be misoptimized,
8324 	     because it use conflicting alias set to hold argument for the
8325 	     memcpy call.  This check is probably unnecessary with
8326 	     -fno-strict-aliasing.  Similarly for destvar.  See also
8327 	     PR29286.  */
8328 	  else if (!var_decl_component_p (srcvar))
8329 	    srcvar = NULL_TREE;
8332       destvar = NULL_TREE;
8333       if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8335 	  destvar = build_fold_indirect_ref_loc (loc, dest);
8336 	  if (TREE_THIS_VOLATILE (destvar))
8337 	    return NULL_TREE;
8338 	  else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8339 	    destvar = NULL_TREE;
8340 	  else if (!var_decl_component_p (destvar))
8341 	    destvar = NULL_TREE;
      /* At least one side must be expressible as a scalar reference.  If
	 only one side is, synthesize the other from its type, making an
	 unaligned packed variant when the pointer alignment is weaker
	 than the natural alignment of the type.  */
8344       if (srcvar == NULL_TREE && destvar == NULL_TREE)
8345 	return NULL_TREE;
8347       if (srcvar == NULL_TREE)
8349 	  tree srcptype;
8350 	  if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8351 	    return NULL_TREE;
8353 	  srctype = build_qualified_type (desttype, 0);
8354 	  if (src_align < (int) TYPE_ALIGN (srctype))
8356 	      if (AGGREGATE_TYPE_P (srctype)
8357 		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8358 		return NULL_TREE;
8360 	      srctype = build_variant_type_copy (srctype);
8361 	      TYPE_ALIGN (srctype) = src_align;
8362 	      TYPE_USER_ALIGN (srctype) = 1;
8363 	      TYPE_PACKED (srctype) = 1;
8365 	  srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8366 	  src = fold_convert_loc (loc, srcptype, src);
8367 	  srcvar = build_fold_indirect_ref_loc (loc, src);
8369       else if (destvar == NULL_TREE)
8371 	  tree destptype;
8372 	  if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8373 	    return NULL_TREE;
8375 	  desttype = build_qualified_type (srctype, 0);
8376 	  if (dest_align < (int) TYPE_ALIGN (desttype))
8378 	      if (AGGREGATE_TYPE_P (desttype)
8379 		  || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8380 		return NULL_TREE;
8382 	      desttype = build_variant_type_copy (desttype);
8383 	      TYPE_ALIGN (desttype) = dest_align;
8384 	      TYPE_USER_ALIGN (desttype) = 1;
8385 	      TYPE_PACKED (desttype) = 1;
8387 	  destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8388 	  dest = fold_convert_loc (loc, destptype, dest);
8389 	  destvar = build_fold_indirect_ref_loc (loc, dest);
      /* Build the scalar assignment, converting the source value to the
	 destination type when the two differ.  */
8392       if (srctype == desttype
8393 	  || (gimple_in_ssa_p (cfun)
8394 	      && useless_type_conversion_p (desttype, srctype)))
8395 	expr = srcvar;
8396       else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8397 	   || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8398 	  && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8399 	      || POINTER_TYPE_P (TREE_TYPE (destvar))))
8400 	expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8401       else
8402 	expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8403 				TREE_TYPE (destvar), srcvar);
8404       expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
  /* Form the final return value according to ENDP (see the header
     comment): DEST, DEST+LEN, or DEST+LEN-1.  */
8407   if (ignore)
8408     return expr;
8410   if (endp == 0 || endp == 3)
8411     return omit_one_operand_loc (loc, type, dest, expr);
8413   if (expr == len)
8414     expr = NULL_TREE;
8416   if (endp == 2)
8417     len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8418 		       ssize_int (1));
8420   len = fold_convert_loc (loc, sizetype, len);
8421   dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8422   dest = fold_convert_loc (loc, type, dest);
8423   if (expr)
8424     dest = omit_one_operand_loc (loc, type, dest, expr);
8425   return dest;
8428 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8429    If LEN is not NULL, it represents the length of the string to be
8430    copied.  Return NULL_TREE if no simplification can be made.  */
8432 tree
8433 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8435   tree fn;
8437   if (!validate_arg (dest, POINTER_TYPE)
8438       || !validate_arg (src, POINTER_TYPE))
8439     return NULL_TREE;
8441   /* If SRC and DEST are the same (and not volatile), return DEST.  */
8442   if (operand_equal_p (src, dest, 0))
8443     return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
  /* Lowering strcpy to memcpy typically grows code; skip it when
     optimizing this function for size.  */
8445   if (optimize_function_for_size_p (cfun))
8446     return NULL_TREE;
8448   fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8449   if (!fn)
8450     return NULL_TREE;
  /* Without a caller-supplied length, we need a compile-time string
     length for SRC, and it must be free of side effects since it gets
     evaluated as the memcpy length argument.  */
8452   if (!len)
8454       len = c_strlen (src, 1);
8455       if (! len || TREE_SIDE_EFFECTS (len))
8456 	return NULL_TREE;
  /* Copy LEN + 1 bytes so the terminating NUL is included.  */
8459   len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8460   return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8461 			   build_call_expr_loc (loc, fn, 3, dest, src, len));
8464 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8465    Return NULL_TREE if no simplification can be made.
8466    The result is DEST + strlen (SRC), i.e. a pointer to the copied
8467    terminating NUL; the copy itself is lowered to memcpy.  */
8467 static tree
8468 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8470   tree fn, len, lenp1, call, type;
8472   if (!validate_arg (dest, POINTER_TYPE)
8473       || !validate_arg (src, POINTER_TYPE))
8474     return NULL_TREE;
  /* A constant source length is required both to size the memcpy and
     to compute the returned end pointer.  */
8476   len = c_strlen (src, 1);
8477   if (!len
8478       || TREE_CODE (len) != INTEGER_CST)
8479     return NULL_TREE;
8481   if (optimize_function_for_size_p (cfun)
8482       /* If length is zero it's small enough.  */
8483       && !integer_zerop (len))
8484     return NULL_TREE;
8486   fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8487   if (!fn)
8488     return NULL_TREE;
  /* LEN + 1 bytes are copied to include the terminating NUL.  */
8490   lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8491   /* We use dest twice in building our expression.  Save it from
8492      multiple expansions.  */
8493   dest = builtin_save_expr (dest);
8494   call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
  /* Result: evaluate the memcpy for its side effect, then yield
     DEST + LEN converted to the declared return type.  */
8496   type = TREE_TYPE (TREE_TYPE (fndecl));
8497   len = fold_convert_loc (loc, sizetype, len);
8498   dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8499   dest = fold_convert_loc (loc, type, dest);
8500   dest = omit_one_operand_loc (loc, type, dest, call);
8501   return dest;
8504 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8505    If SLEN is not NULL, it represents the length of the source string.
8506    Return NULL_TREE if no simplification can be made.  */
8508 tree
8509 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8510 		      tree src, tree len, tree slen)
8512   tree fn;
8514   if (!validate_arg (dest, POINTER_TYPE)
8515       || !validate_arg (src, POINTER_TYPE)
8516       || !validate_arg (len, INTEGER_TYPE))
8517     return NULL_TREE;
8519   /* If the LEN parameter is zero, return DEST.  */
8520   if (integer_zerop (len))
8521     return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8523   /* We can't compare slen with len as constants below if len is not a
8524      constant.  */
8525   if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8526     return NULL_TREE;
8528   if (!slen)
8529     slen = c_strlen (src, 1);
8531   /* Now, we must be passed a constant src ptr parameter.  */
8532   if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8533     return NULL_TREE;
  /* SLEN + 1 accounts for the terminating NUL of SRC.  */
8535   slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8537   /* We do not support simplification of this case, though we do
8538      support it when expanding trees into RTL.  */
8539   /* FIXME: generate a call to __builtin_memset.  */
8540   if (tree_int_cst_lt (slen, len))
8541     return NULL_TREE;
8543   /* OK transform into builtin memcpy.  */
8544   fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8545   if (!fn)
8546     return NULL_TREE;
8547   return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8548 			   build_call_expr_loc (loc, fn, 3, dest, src, len));
8551 /* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
8552    arguments to the call, and TYPE is its return type.
8553    Return NULL_TREE if no simplification can be made.  */
8555 static tree
8556 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8558   if (!validate_arg (arg1, POINTER_TYPE)
8559       || !validate_arg (arg2, INTEGER_TYPE)
8560       || !validate_arg (len, INTEGER_TYPE))
8561     return NULL_TREE;
8562   else
8564       const char *p1;
  /* Only a constant search character and a constant, unsigned
     host-representable length can be evaluated at compile time.  */
8566       if (TREE_CODE (arg2) != INTEGER_CST
8567 	  || !host_integerp (len, 1))
8568 	return NULL_TREE;
  /* Fold only when ARG1 is a string constant and LEN stays within the
     string including its terminating NUL, so memchr on the host scans
     exactly the bytes the target call would.  */
8570       p1 = c_getstr (arg1);
8571       if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8573 	  char c;
8574 	  const char *r;
8575 	  tree tem;
	  /* target_char_cast converts ARG2 to the target character set;
	     it fails (nonzero) when the value is not representable.  */
8577 	  if (target_char_cast (arg2, &c))
8578 	    return NULL_TREE;
8580 	  r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8582 	  if (r == NULL)
8583 	    return build_int_cst (TREE_TYPE (arg1), 0);
	  /* Found: return ARG1 advanced by the match offset.  */
8585 	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8586 			     size_int (r - p1));
8587 	  return fold_convert_loc (loc, type, tem);
8589       return NULL_TREE;
8593 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8594    Return NULL_TREE if no simplification can be made.  */
8596 static tree
8597 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8599   const char *p1, *p2;
8601   if (!validate_arg (arg1, POINTER_TYPE)
8602       || !validate_arg (arg2, POINTER_TYPE)
8603       || !validate_arg (len, INTEGER_TYPE))
8604     return NULL_TREE;
8606   /* If the LEN parameter is zero, return zero.  */
8607   if (integer_zerop (len))
8608     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8609 				  arg1, arg2);
8611   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8612   if (operand_equal_p (arg1, arg2, 0))
8613     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8615   p1 = c_getstr (arg1);
8616   p2 = c_getstr (arg2);
8618   /* If all arguments are constant, and the value of len is not greater
8619      than the lengths of arg1 and arg2, evaluate at compile-time.  */
8620   if (host_integerp (len, 1) && p1 && p2
8621       && compare_tree_int (len, strlen (p1) + 1) <= 0
8622       && compare_tree_int (len, strlen (p2) + 1) <= 0)
8624       const int r = memcmp (p1, p2, tree_low_cst (len, 1));
      /* Normalize the host memcmp result to -1/0/1; the standard only
	 guarantees the sign.  */
8626       if (r > 0)
8627 	return integer_one_node;
8628       else if (r < 0)
8629 	return integer_minus_one_node;
8630       else
8631 	return integer_zero_node;
8634   /* If len parameter is one, return an expression corresponding to
8635      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
8636   if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8638       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8639       tree cst_uchar_ptr_node
8640 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8642       tree ind1
8643 	= fold_convert_loc (loc, integer_type_node,
8644 			    build1 (INDIRECT_REF, cst_uchar_node,
8645 				    fold_convert_loc (loc,
8646 						      cst_uchar_ptr_node,
8647 						      arg1)));
8648       tree ind2
8649 	= fold_convert_loc (loc, integer_type_node,
8650 			    build1 (INDIRECT_REF, cst_uchar_node,
8651 				    fold_convert_loc (loc,
8652 						      cst_uchar_ptr_node,
8653 						      arg2)));
8654       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8657   return NULL_TREE;
8660 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8661    Return NULL_TREE if no simplification can be made.  */
8663 static tree
8664 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8666   const char *p1, *p2;
8668   if (!validate_arg (arg1, POINTER_TYPE)
8669       || !validate_arg (arg2, POINTER_TYPE))
8670     return NULL_TREE;
8672   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8673   if (operand_equal_p (arg1, arg2, 0))
8674     return integer_zero_node;
8676   p1 = c_getstr (arg1);
8677   p2 = c_getstr (arg2);
  /* Both arguments are string constants: evaluate on the host and
     normalize to -1/0/1, since only the sign is specified.  */
8679   if (p1 && p2)
8681       const int i = strcmp (p1, p2);
8682       if (i < 0)
8683 	return integer_minus_one_node;
8684       else if (i > 0)
8685 	return integer_one_node;
8686       else
8687 	return integer_zero_node;
8690   /* If the second arg is "", return *(const unsigned char*)arg1.  */
8691   if (p2 && *p2 == '\0')
8693       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8694       tree cst_uchar_ptr_node
8695 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8697       return fold_convert_loc (loc, integer_type_node,
8698 			       build1 (INDIRECT_REF, cst_uchar_node,
8699 				       fold_convert_loc (loc,
8700 							 cst_uchar_ptr_node,
8701 							 arg1)));
8704   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
8705   if (p1 && *p1 == '\0')
8707       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8708       tree cst_uchar_ptr_node
8709 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8711       tree temp
8712 	= fold_convert_loc (loc, integer_type_node,
8713 			    build1 (INDIRECT_REF, cst_uchar_node,
8714 				    fold_convert_loc (loc,
8715 						      cst_uchar_ptr_node,
8716 						      arg2)));
8717       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8720   return NULL_TREE;
8723 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8724    Return NULL_TREE if no simplification can be made.  */
8726 static tree
8727 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8729   const char *p1, *p2;
8731   if (!validate_arg (arg1, POINTER_TYPE)
8732       || !validate_arg (arg2, POINTER_TYPE)
8733       || !validate_arg (len, INTEGER_TYPE))
8734     return NULL_TREE;
8736   /* If the LEN parameter is zero, return zero.  */
8737   if (integer_zerop (len))
8738     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8739 				  arg1, arg2);
8741   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8742   if (operand_equal_p (arg1, arg2, 0))
8743     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8745   p1 = c_getstr (arg1);
8746   p2 = c_getstr (arg2);
  /* Both arguments are string constants and LEN is a constant:
     evaluate on the host and normalize to -1/0/1.  */
8748   if (host_integerp (len, 1) && p1 && p2)
8750       const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8751       if (i > 0)
8752 	return integer_one_node;
8753       else if (i < 0)
8754 	return integer_minus_one_node;
8755       else
8756 	return integer_zero_node;
8759   /* If the second arg is "", and the length is greater than zero,
8760      return *(const unsigned char*)arg1.  */
8761   if (p2 && *p2 == '\0'
8762       && TREE_CODE (len) == INTEGER_CST
8763       && tree_int_cst_sgn (len) == 1)
8765       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8766       tree cst_uchar_ptr_node
8767 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8769       return fold_convert_loc (loc, integer_type_node,
8770 			       build1 (INDIRECT_REF, cst_uchar_node,
8771 				       fold_convert_loc (loc,
8772 							 cst_uchar_ptr_node,
8773 							 arg1)));
8776   /* If the first arg is "", and the length is greater than zero,
8777      return -*(const unsigned char*)arg2.  */
8778   if (p1 && *p1 == '\0'
8779       && TREE_CODE (len) == INTEGER_CST
8780       && tree_int_cst_sgn (len) == 1)
8782       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8783       tree cst_uchar_ptr_node
8784 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8786       tree temp = fold_convert_loc (loc, integer_type_node,
8787 				    build1 (INDIRECT_REF, cst_uchar_node,
8788 					    fold_convert_loc (loc,
8789 							      cst_uchar_ptr_node,
8790 							      arg2)));
8791       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8794   /* If len parameter is one, return an expression corresponding to
8795      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
8796   if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8798       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8799       tree cst_uchar_ptr_node
8800 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8802       tree ind1 = fold_convert_loc (loc, integer_type_node,
8803 				    build1 (INDIRECT_REF, cst_uchar_node,
8804 					    fold_convert_loc (loc,
8805 							      cst_uchar_ptr_node,
8806 							      arg1)));
8807       tree ind2 = fold_convert_loc (loc, integer_type_node,
8808 				    build1 (INDIRECT_REF, cst_uchar_node,
8809 					    fold_convert_loc (loc,
8810 							      cst_uchar_ptr_node,
8811 							      arg2)));
8812       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8815   return NULL_TREE;
8818 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8819    ARG.  Return NULL_TREE if no simplification can be made.
8820    The result is nonzero iff the sign bit of ARG is set.  */
8821 static tree
8822 fold_builtin_signbit (location_t loc, tree arg, tree type)
8824   tree temp;
8826   if (!validate_arg (arg, REAL_TYPE))
8827     return NULL_TREE;
8829   /* If ARG is a compile-time constant, determine the result.  */
8830   if (TREE_CODE (arg) == REAL_CST
8831       && !TREE_OVERFLOW (arg))
8833       REAL_VALUE_TYPE c;
8835       c = TREE_REAL_CST (arg);
8836       temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8837       return fold_convert_loc (loc, type, temp);
8840   /* If ARG is non-negative, the result is always zero.  */
8841   if (tree_expr_nonnegative_p (arg))
8842     return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8844   /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  /* (With signed zeros, -0.0 < 0.0 is false yet signbit (-0.0) is
     nonzero, so the comparison form would be wrong.)  */
8845   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8846     return fold_build2_loc (loc, LT_EXPR, type, arg,
8847 			build_real (TREE_TYPE (arg), dconst0));
8849   return NULL_TREE;
8852 /* Fold function call to builtin copysign, copysignf or copysignl with
8853    arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
8854    be made.  The result has the magnitude of ARG1 and the sign of ARG2.  */
8856 static tree
8857 fold_builtin_copysign (location_t loc, tree fndecl,
8858 		       tree arg1, tree arg2, tree type)
8860   tree tem;
8862   if (!validate_arg (arg1, REAL_TYPE)
8863       || !validate_arg (arg2, REAL_TYPE))
8864     return NULL_TREE;
8866   /* copysign(X,X) is X.  */
8867   if (operand_equal_p (arg1, arg2, 0))
8868     return fold_convert_loc (loc, type, arg1);
8870   /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
8871   if (TREE_CODE (arg1) == REAL_CST
8872       && TREE_CODE (arg2) == REAL_CST
8873       && !TREE_OVERFLOW (arg1)
8874       && !TREE_OVERFLOW (arg2))
8876       REAL_VALUE_TYPE c1, c2;
8878       c1 = TREE_REAL_CST (arg1);
8879       c2 = TREE_REAL_CST (arg2);
8880       /* c1.sign := c2.sign.  */
8881       real_copysign (&c1, &c2);
8882       return build_real (type, c1);
8885   /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8886      Remember to evaluate Y for side-effects.  */
8887   if (tree_expr_nonnegative_p (arg2))
8888     return omit_one_operand_loc (loc, type,
8889 			     fold_build1_loc (loc, ABS_EXPR, type, arg1),
8890 			     arg2);
8892   /* Strip sign changing operations for the first argument.  */
  /* Sign-affecting operations on ARG1 (e.g. negation, fabs) are
     irrelevant since copysign overrides the sign anyway.  */
8893   tem = fold_strip_sign_ops (arg1);
8894   if (tem)
8895     return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8897   return NULL_TREE;
8900 /* Fold a call to builtin isascii with argument ARG.
8901    Returns NULL_TREE if ARG is not an integer expression.  */
8902 static tree
8903 fold_builtin_isascii (location_t loc, tree arg)
8905   if (!validate_arg (arg, INTEGER_TYPE))
8906     return NULL_TREE;
8907   else
8909       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
8910       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8911 			 build_int_cst (NULL_TREE,
8912 					~ (unsigned HOST_WIDE_INT) 0x7f));
8913       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8914 			  arg, integer_zero_node);
8918 /* Fold a call to builtin toascii with argument ARG.
8919    Returns NULL_TREE if ARG is not an integer expression.  */
8920 static tree
8921 fold_builtin_toascii (location_t loc, tree arg)
8923   if (!validate_arg (arg, INTEGER_TYPE))
8924     return NULL_TREE;
8926   /* Transform toascii(c) -> (c & 0x7f).  */
8927   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8928 		      build_int_cst (NULL_TREE, 0x7f));
8931 /* Fold a call to builtin isdigit with argument ARG.
8932    Returns NULL_TREE if no simplification can be made.  */
8933 static tree
8934 fold_builtin_isdigit (location_t loc, tree arg)
8936   if (!validate_arg (arg, INTEGER_TYPE))
8937     return NULL_TREE;
8938   else
8940       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
8941       /* According to the C standard, isdigit is unaffected by locale.
8942 	 However, it definitely is affected by the target character set.  */
8943       unsigned HOST_WIDE_INT target_digit0
8944 	= lang_hooks.to_target_charset ('0');
      /* A zero from to_target_charset means the conversion failed, so
	 we cannot fold.  */
8946       if (target_digit0 == 0)
8947 	return NULL_TREE;
      /* The unsigned subtraction makes both the "< '0'" and "> '9'"
	 cases fail the single <= 9 comparison.  */
8949       arg = fold_convert_loc (loc, unsigned_type_node, arg);
8950       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8951 			 build_int_cst (unsigned_type_node, target_digit0));
8952       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8953 			  build_int_cst (unsigned_type_node, 9));
8957 /* Fold a call to fabs, fabsf or fabsl with argument ARG.
8958    TYPE is the expected (real) result type.  */
8959 static tree
8960 fold_builtin_fabs (location_t loc, tree arg, tree type)
8962   if (!validate_arg (arg, REAL_TYPE))
8963     return NULL_TREE;
8965   arg = fold_convert_loc (loc, type, arg);
  /* Constant argument: compute |ARG| at compile time; otherwise emit
     an ABS_EXPR.  */
8966   if (TREE_CODE (arg) == REAL_CST)
8967     return fold_abs_const (arg, type);
8968   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8971 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.
8972    TYPE is the expected (integer) result type.  */
8973 static tree
8974 fold_builtin_abs (location_t loc, tree arg, tree type)
8976   if (!validate_arg (arg, INTEGER_TYPE))
8977     return NULL_TREE;
8979   arg = fold_convert_loc (loc, type, arg);
  /* Constant argument: compute |ARG| at compile time; otherwise emit
     an ABS_EXPR.  */
8980   if (TREE_CODE (arg) == INTEGER_CST)
8981     return fold_abs_const (arg, type);
8982   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8985 /* Fold a call to builtin fmin or fmax.  ARG0 and ARG1 are the two
8986    arguments, TYPE the result type and MAX selects fmax over fmin.  */
8987 static tree
8988 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8989 			tree type, bool max)
8991   if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8993       /* Calculate the result when the argument is a constant.  */
8994       tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8996       if (res)
8997 	return res;
8999       /* If either argument is NaN, return the other one.  Avoid the
9000 	 transformation if we get (and honor) a signalling NaN.  Using
9001 	 omit_one_operand() ensures we create a non-lvalue.  */
9002       if (TREE_CODE (arg0) == REAL_CST
9003 	  && real_isnan (&TREE_REAL_CST (arg0))
9004 	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9005 	      || ! TREE_REAL_CST (arg0).signalling))
9006 	return omit_one_operand_loc (loc, type, arg1, arg0);
9007       if (TREE_CODE (arg1) == REAL_CST
9008 	  && real_isnan (&TREE_REAL_CST (arg1))
9009 	  && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9010 	      || ! TREE_REAL_CST (arg1).signalling))
9011 	return omit_one_operand_loc (loc, type, arg0, arg1);
9013       /* Transform fmin/fmax(x,x) -> x.  */
9014       if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9015 	return omit_one_operand_loc (loc, type, arg0, arg1);
9017       /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
9018 	 functions to return the numeric arg if the other one is NaN.
9019 	 These tree codes don't honor that, so only transform if
9020 	 -ffinite-math-only is set.  C99 doesn't require -0.0 to be
9021 	 handled, so we don't have to worry about it either.  */
9022       if (flag_finite_math_only)
9023 	return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9024 			    fold_convert_loc (loc, type, arg0),
9025 			    fold_convert_loc (loc, type, arg1));
9027   return NULL_TREE;
9030 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).
9031    TYPE is the real component type of the complex argument.  */
9032 static tree
9033 fold_builtin_carg (location_t loc, tree arg, tree type)
9035   if (validate_arg (arg, COMPLEX_TYPE)
9036       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9038       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9040       if (atan2_fn)
	  /* Save ARG so extracting its real and imaginary parts does
	     not evaluate it twice.  */
9042 	  tree new_arg = builtin_save_expr (arg);
9043 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9044 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9045 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9049   return NULL_TREE;
9052 /* Fold a call to builtin logb/ilogb.  RETTYPE distinguishes the two:
9053    a REAL_TYPE means logb, an integer type means ilogb.  */
9054 static tree
9055 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9057   if (! validate_arg (arg, REAL_TYPE))
9058     return NULL_TREE;
9060   STRIP_NOPS (arg);
9062   if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9064       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9066       switch (value->cl)
9068 	case rvc_nan:
9069 	case rvc_inf:
9070 	  /* If arg is Inf or NaN and we're logb, return it.  */
9071 	  if (TREE_CODE (rettype) == REAL_TYPE)
9072 	    return fold_convert_loc (loc, rettype, arg);
9073 	  /* Fall through... */
9074 	case rvc_zero:
9075 	  /* Zero may set errno and/or raise an exception for logb, also
9076 	     for ilogb we don't know FP_ILOGB0.  */
9077 	  return NULL_TREE;
9078 	case rvc_normal:
9079 	  /* For normal numbers, proceed iff radix == 2.  In GCC,
9080 	     normalized significands are in the range [0.5, 1.0).  We
9081 	     want the exponent as if they were [1.0, 2.0) so get the
9082 	     exponent and subtract 1.  */
9083 	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9084 	    return fold_convert_loc (loc, rettype,
9085 				 build_int_cst (NULL_TREE,
9086 						REAL_EXP (value)-1));
9087 	  break;
9091   return NULL_TREE;
9094 /* Fold a call to builtin significand, if radix == 2.
9095    Returns NULL_TREE unless ARG is a compile-time real constant.  */
9096 static tree
9097 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9099   if (! validate_arg (arg, REAL_TYPE))
9100     return NULL_TREE;
9102   STRIP_NOPS (arg);
9104   if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9106       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9108       switch (value->cl)
9110 	case rvc_zero:
9111 	case rvc_nan:
9112 	case rvc_inf:
9113 	  /* If arg is +-0, +-Inf or +-NaN, then return it.  */
9114 	  return fold_convert_loc (loc, rettype, arg);
9115 	case rvc_normal:
9116 	  /* For normal numbers, proceed iff radix == 2.  */
9117 	  if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9119 	      REAL_VALUE_TYPE result = *value;
9120 	      /* In GCC, normalized significands are in the range [0.5,
9121 		 1.0).  We want them to be [1.0, 2.0) so set the
9122 		 exponent to 1.  */
9123 	      SET_REAL_EXP (&result, 1);
9124 	      return build_real (rettype, result);
9126 	  break;
9130   return NULL_TREE;
9133 /* Fold a call to builtin frexp, we can assume the base is 2.
9134    ARG0 is the real argument, ARG1 the int* exponent out-pointer and
9135    RETTYPE the result type.  Returns NULL_TREE unless ARG0 is a
9136    compile-time constant and ARG1 points to an int.  */
9135 static tree
9136 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9138   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9139     return NULL_TREE;
9141   STRIP_NOPS (arg0);
9143   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9144     return NULL_TREE;
9146   arg1 = build_fold_indirect_ref_loc (loc, arg1);
9148   /* Proceed if a valid pointer type was passed in.  */
9149   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9151       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9152       tree frac, exp;
9154       switch (value->cl)
9156 	case rvc_zero:
9157 	  /* For +-0, return (*exp = 0, +-0).  */
9158 	  exp = integer_zero_node;
9159 	  frac = arg0;
9160 	  break;
9161 	case rvc_nan:
9162 	case rvc_inf:
9163 	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
9164 	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
9165 	case rvc_normal:
9167 	    /* Since the frexp function always expects base 2, and in
9168 	       GCC normalized significands are already in the range
9169 	       [0.5, 1.0), we have exactly what frexp wants.  */
9170 	    REAL_VALUE_TYPE frac_rvt = *value;
9171 	    SET_REAL_EXP (&frac_rvt, 0);
9172 	    frac = build_real (rettype, frac_rvt);
9173 	    exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9175 	  break;
9176 	default:
9177 	  gcc_unreachable ();
9180       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
9181       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9182       TREE_SIDE_EFFECTS (arg1) = 1;
9183       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9186   return NULL_TREE;
9189 /* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
9190    then we can assume the base is two.  If it's false, then we have to
9191    check the mode of the TYPE parameter in certain cases.  */
9193 static tree
9194 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9195 			    tree type, bool ldexp)
9197   if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9199       STRIP_NOPS (arg0);
9200       STRIP_NOPS (arg1);
9202       /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
9203       if (real_zerop (arg0) || integer_zerop (arg1)
9204 	  || (TREE_CODE (arg0) == REAL_CST
9205 	      && !real_isfinite (&TREE_REAL_CST (arg0))))
9206 	return omit_one_operand_loc (loc, type, arg0, arg1);
9208       /* If both arguments are constant, then try to evaluate it.  */
      /* For scalbn/scalbln (LDEXP false), folding is only valid when
	 the type's radix is 2, matching ldexp semantics.  */
9209       if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9210 	  && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9211 	  && host_integerp (arg1, 0))
9213 	  /* Bound the maximum adjustment to twice the range of the
9214 	     mode's valid exponents.  Use abs to ensure the range is
9215 	     positive as a sanity check.  */
9216 	  const long max_exp_adj = 2 *
9217 	    labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9218 		 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9220 	  /* Get the user-requested adjustment.  */
9221 	  const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9223 	  /* The requested adjustment must be inside this range.  This
9224 	     is a preliminary cap to avoid things like overflow, we
9225 	     may still fail to compute the result for other reasons.  */
9226 	  if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9228 	      REAL_VALUE_TYPE initial_result;
9230 	      real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9232 	      /* Ensure we didn't overflow.  */
9233 	      if (! real_isinf (&initial_result))
9235 		  const REAL_VALUE_TYPE trunc_result
9236 		    = real_value_truncate (TYPE_MODE (type), initial_result);
9238 		  /* Only proceed if the target mode can hold the
9239 		     resulting value.  */
9240 		  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9241 		    return build_real (type, trunc_result);
9247   return NULL_TREE;
9250 /* Fold a call to builtin modf. */
9252 static tree
9253 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9255 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9256 return NULL_TREE;
9258 STRIP_NOPS (arg0);
9260 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9261 return NULL_TREE;
9263 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9265 /* Proceed if a valid pointer type was passed in. */
9266 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9268 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9269 REAL_VALUE_TYPE trunc, frac;
9271 switch (value->cl)
9273 case rvc_nan:
9274 case rvc_zero:
9275 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9276 trunc = frac = *value;
9277 break;
9278 case rvc_inf:
9279 /* For +-Inf, return (*arg1 = arg0, +-0). */
9280 frac = dconst0;
9281 frac.sign = value->sign;
9282 trunc = *value;
9283 break;
9284 case rvc_normal:
9285 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9286 real_trunc (&trunc, VOIDmode, value);
9287 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9288 /* If the original number was negative and already
9289 integral, then the fractional part is -0.0. */
9290 if (value->sign && frac.cl == rvc_zero)
9291 frac.sign = value->sign;
9292 break;
9295 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9296 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9297 build_real (rettype, trunc));
9298 TREE_SIDE_EFFECTS (arg1) = 1;
9299 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9300 build_real (rettype, frac));
9303 return NULL_TREE;
9306 /* Given a location LOC, an interclass builtin function decl FNDECL
9307 and its single argument ARG, return an folded expression computing
9308 the same, or NULL_TREE if we either couldn't or didn't want to fold
9309 (the latter happen if there's an RTL instruction available). */
9311 static tree
9312 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9314 enum machine_mode mode;
9316 if (!validate_arg (arg, REAL_TYPE))
9317 return NULL_TREE;
9319 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9320 return NULL_TREE;
9322 mode = TYPE_MODE (TREE_TYPE (arg));
9324 /* If there is no optab, try generic code. */
9325 switch (DECL_FUNCTION_CODE (fndecl))
9327 tree result;
9329 CASE_FLT_FN (BUILT_IN_ISINF):
9331 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9332 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9333 tree const type = TREE_TYPE (arg);
9334 REAL_VALUE_TYPE r;
9335 char buf[128];
9337 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9338 real_from_string (&r, buf);
9339 result = build_call_expr (isgr_fn, 2,
9340 fold_build1_loc (loc, ABS_EXPR, type, arg),
9341 build_real (type, r));
9342 return result;
9344 CASE_FLT_FN (BUILT_IN_FINITE):
9345 case BUILT_IN_ISFINITE:
9347 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9348 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9349 tree const type = TREE_TYPE (arg);
9350 REAL_VALUE_TYPE r;
9351 char buf[128];
9353 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9354 real_from_string (&r, buf);
9355 result = build_call_expr (isle_fn, 2,
9356 fold_build1_loc (loc, ABS_EXPR, type, arg),
9357 build_real (type, r));
9358 /*result = fold_build2_loc (loc, UNGT_EXPR,
9359 TREE_TYPE (TREE_TYPE (fndecl)),
9360 fold_build1_loc (loc, ABS_EXPR, type, arg),
9361 build_real (type, r));
9362 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9363 TREE_TYPE (TREE_TYPE (fndecl)),
9364 result);*/
9365 return result;
9367 case BUILT_IN_ISNORMAL:
9369 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9370 islessequal(fabs(x),DBL_MAX). */
9371 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9372 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9373 tree const type = TREE_TYPE (arg);
9374 REAL_VALUE_TYPE rmax, rmin;
9375 char buf[128];
9377 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9378 real_from_string (&rmax, buf);
9379 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9380 real_from_string (&rmin, buf);
9381 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9382 result = build_call_expr (isle_fn, 2, arg,
9383 build_real (type, rmax));
9384 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9385 build_call_expr (isge_fn, 2, arg,
9386 build_real (type, rmin)));
9387 return result;
9389 default:
9390 break;
9393 return NULL_TREE;
9396 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9397 ARG is the argument for the call. */
9399 static tree
9400 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9402 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9403 REAL_VALUE_TYPE r;
9405 if (!validate_arg (arg, REAL_TYPE))
9406 return NULL_TREE;
9408 switch (builtin_index)
9410 case BUILT_IN_ISINF:
9411 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9412 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9414 if (TREE_CODE (arg) == REAL_CST)
9416 r = TREE_REAL_CST (arg);
9417 if (real_isinf (&r))
9418 return real_compare (GT_EXPR, &r, &dconst0)
9419 ? integer_one_node : integer_minus_one_node;
9420 else
9421 return integer_zero_node;
9424 return NULL_TREE;
9426 case BUILT_IN_ISINF_SIGN:
9428 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9429 /* In a boolean context, GCC will fold the inner COND_EXPR to
9430 1. So e.g. "if (isinf_sign(x))" would be folded to just
9431 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9432 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9433 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9434 tree tmp = NULL_TREE;
9436 arg = builtin_save_expr (arg);
9438 if (signbit_fn && isinf_fn)
9440 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9441 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9443 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9444 signbit_call, integer_zero_node);
9445 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9446 isinf_call, integer_zero_node);
9448 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9449 integer_minus_one_node, integer_one_node);
9450 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9451 isinf_call, tmp,
9452 integer_zero_node);
9455 return tmp;
9458 case BUILT_IN_ISFINITE:
9459 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9460 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9461 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9463 if (TREE_CODE (arg) == REAL_CST)
9465 r = TREE_REAL_CST (arg);
9466 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9469 return NULL_TREE;
9471 case BUILT_IN_ISNAN:
9472 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9473 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9475 if (TREE_CODE (arg) == REAL_CST)
9477 r = TREE_REAL_CST (arg);
9478 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9481 arg = builtin_save_expr (arg);
9482 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9484 default:
9485 gcc_unreachable ();
9489 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9490 This builtin will generate code to return the appropriate floating
9491 point classification depending on the value of the floating point
9492 number passed in. The possible return values must be supplied as
9493 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9494 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9495 one floating point argument which is "type generic". */
9497 static tree
9498 fold_builtin_fpclassify (location_t loc, tree exp)
9500 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9501 arg, type, res, tmp;
9502 enum machine_mode mode;
9503 REAL_VALUE_TYPE r;
9504 char buf[128];
9506 /* Verify the required arguments in the original call. */
9507 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9508 INTEGER_TYPE, INTEGER_TYPE,
9509 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9510 return NULL_TREE;
9512 fp_nan = CALL_EXPR_ARG (exp, 0);
9513 fp_infinite = CALL_EXPR_ARG (exp, 1);
9514 fp_normal = CALL_EXPR_ARG (exp, 2);
9515 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9516 fp_zero = CALL_EXPR_ARG (exp, 4);
9517 arg = CALL_EXPR_ARG (exp, 5);
9518 type = TREE_TYPE (arg);
9519 mode = TYPE_MODE (type);
9520 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9522 /* fpclassify(x) ->
9523 isnan(x) ? FP_NAN :
9524 (fabs(x) == Inf ? FP_INFINITE :
9525 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9526 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9528 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9529 build_real (type, dconst0));
9530 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9531 tmp, fp_zero, fp_subnormal);
9533 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9534 real_from_string (&r, buf);
9535 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9536 arg, build_real (type, r));
9537 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9539 if (HONOR_INFINITIES (mode))
9541 real_inf (&r);
9542 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9543 build_real (type, r));
9544 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9545 fp_infinite, res);
9548 if (HONOR_NANS (mode))
9550 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9551 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9554 return res;
9557 /* Fold a call to an unordered comparison function such as
9558 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9559 being called and ARG0 and ARG1 are the arguments for the call.
9560 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9561 the opposite of the desired result. UNORDERED_CODE is used
9562 for modes that can hold NaNs and ORDERED_CODE is used for
9563 the rest. */
9565 static tree
9566 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9567 enum tree_code unordered_code,
9568 enum tree_code ordered_code)
9570 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9571 enum tree_code code;
9572 tree type0, type1;
9573 enum tree_code code0, code1;
9574 tree cmp_type = NULL_TREE;
9576 type0 = TREE_TYPE (arg0);
9577 type1 = TREE_TYPE (arg1);
9579 code0 = TREE_CODE (type0);
9580 code1 = TREE_CODE (type1);
9582 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9583 /* Choose the wider of two real types. */
9584 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9585 ? type0 : type1;
9586 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9587 cmp_type = type0;
9588 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9589 cmp_type = type1;
9591 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9592 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9594 if (unordered_code == UNORDERED_EXPR)
9596 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9597 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9598 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9601 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9602 : ordered_code;
9603 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9604 fold_build2_loc (loc, code, type, arg0, arg1));
9607 /* Fold a call to built-in function FNDECL with 0 arguments.
9608 IGNORE is true if the result of the function call is ignored. This
9609 function returns NULL_TREE if no simplification was possible. */
9611 static tree
9612 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9614 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9615 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9616 switch (fcode)
9618 CASE_FLT_FN (BUILT_IN_INF):
9619 case BUILT_IN_INFD32:
9620 case BUILT_IN_INFD64:
9621 case BUILT_IN_INFD128:
9622 return fold_builtin_inf (loc, type, true);
9624 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9625 return fold_builtin_inf (loc, type, false);
9627 case BUILT_IN_CLASSIFY_TYPE:
9628 return fold_builtin_classify_type (NULL_TREE);
9630 default:
9631 break;
9633 return NULL_TREE;
9636 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9637 IGNORE is true if the result of the function call is ignored. This
9638 function returns NULL_TREE if no simplification was possible. */
9640 static tree
9641 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9643 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9644 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9645 switch (fcode)
9648 case BUILT_IN_CONSTANT_P:
9650 tree val = fold_builtin_constant_p (arg0);
9652 /* Gimplification will pull the CALL_EXPR for the builtin out of
9653 an if condition. When not optimizing, we'll not CSE it back.
9654 To avoid link error types of regressions, return false now. */
9655 if (!val && !optimize)
9656 val = integer_zero_node;
9658 return val;
9661 case BUILT_IN_CLASSIFY_TYPE:
9662 return fold_builtin_classify_type (arg0);
9664 case BUILT_IN_STRLEN:
9665 return fold_builtin_strlen (loc, type, arg0);
9667 CASE_FLT_FN (BUILT_IN_FABS):
9668 return fold_builtin_fabs (loc, arg0, type);
9670 case BUILT_IN_ABS:
9671 case BUILT_IN_LABS:
9672 case BUILT_IN_LLABS:
9673 case BUILT_IN_IMAXABS:
9674 return fold_builtin_abs (loc, arg0, type);
9676 CASE_FLT_FN (BUILT_IN_CONJ):
9677 if (validate_arg (arg0, COMPLEX_TYPE)
9678 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9679 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9680 break;
9682 CASE_FLT_FN (BUILT_IN_CREAL):
9683 if (validate_arg (arg0, COMPLEX_TYPE)
9684 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9685 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9686 break;
9688 CASE_FLT_FN (BUILT_IN_CIMAG):
9689 if (validate_arg (arg0, COMPLEX_TYPE)
9690 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9691 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9692 break;
9694 CASE_FLT_FN (BUILT_IN_CCOS):
9695 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9697 CASE_FLT_FN (BUILT_IN_CCOSH):
9698 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9700 CASE_FLT_FN (BUILT_IN_CSIN):
9701 if (validate_arg (arg0, COMPLEX_TYPE)
9702 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9703 return do_mpc_arg1 (arg0, type, mpc_sin);
9704 break;
9706 CASE_FLT_FN (BUILT_IN_CSINH):
9707 if (validate_arg (arg0, COMPLEX_TYPE)
9708 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9709 return do_mpc_arg1 (arg0, type, mpc_sinh);
9710 break;
9712 CASE_FLT_FN (BUILT_IN_CTAN):
9713 if (validate_arg (arg0, COMPLEX_TYPE)
9714 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9715 return do_mpc_arg1 (arg0, type, mpc_tan);
9716 break;
9718 CASE_FLT_FN (BUILT_IN_CTANH):
9719 if (validate_arg (arg0, COMPLEX_TYPE)
9720 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9721 return do_mpc_arg1 (arg0, type, mpc_tanh);
9722 break;
9724 CASE_FLT_FN (BUILT_IN_CLOG):
9725 if (validate_arg (arg0, COMPLEX_TYPE)
9726 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9727 return do_mpc_arg1 (arg0, type, mpc_log);
9728 break;
9730 CASE_FLT_FN (BUILT_IN_CSQRT):
9731 if (validate_arg (arg0, COMPLEX_TYPE)
9732 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9733 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9734 break;
9736 CASE_FLT_FN (BUILT_IN_CASIN):
9737 if (validate_arg (arg0, COMPLEX_TYPE)
9738 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9739 return do_mpc_arg1 (arg0, type, mpc_asin);
9740 break;
9742 CASE_FLT_FN (BUILT_IN_CACOS):
9743 if (validate_arg (arg0, COMPLEX_TYPE)
9744 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9745 return do_mpc_arg1 (arg0, type, mpc_acos);
9746 break;
9748 CASE_FLT_FN (BUILT_IN_CATAN):
9749 if (validate_arg (arg0, COMPLEX_TYPE)
9750 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9751 return do_mpc_arg1 (arg0, type, mpc_atan);
9752 break;
9754 CASE_FLT_FN (BUILT_IN_CASINH):
9755 if (validate_arg (arg0, COMPLEX_TYPE)
9756 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9757 return do_mpc_arg1 (arg0, type, mpc_asinh);
9758 break;
9760 CASE_FLT_FN (BUILT_IN_CACOSH):
9761 if (validate_arg (arg0, COMPLEX_TYPE)
9762 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9763 return do_mpc_arg1 (arg0, type, mpc_acosh);
9764 break;
9766 CASE_FLT_FN (BUILT_IN_CATANH):
9767 if (validate_arg (arg0, COMPLEX_TYPE)
9768 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9769 return do_mpc_arg1 (arg0, type, mpc_atanh);
9770 break;
9772 CASE_FLT_FN (BUILT_IN_CABS):
9773 return fold_builtin_cabs (loc, arg0, type, fndecl);
9775 CASE_FLT_FN (BUILT_IN_CARG):
9776 return fold_builtin_carg (loc, arg0, type);
9778 CASE_FLT_FN (BUILT_IN_SQRT):
9779 return fold_builtin_sqrt (loc, arg0, type);
9781 CASE_FLT_FN (BUILT_IN_CBRT):
9782 return fold_builtin_cbrt (loc, arg0, type);
9784 CASE_FLT_FN (BUILT_IN_ASIN):
9785 if (validate_arg (arg0, REAL_TYPE))
9786 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9787 &dconstm1, &dconst1, true);
9788 break;
9790 CASE_FLT_FN (BUILT_IN_ACOS):
9791 if (validate_arg (arg0, REAL_TYPE))
9792 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9793 &dconstm1, &dconst1, true);
9794 break;
9796 CASE_FLT_FN (BUILT_IN_ATAN):
9797 if (validate_arg (arg0, REAL_TYPE))
9798 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9799 break;
9801 CASE_FLT_FN (BUILT_IN_ASINH):
9802 if (validate_arg (arg0, REAL_TYPE))
9803 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9804 break;
9806 CASE_FLT_FN (BUILT_IN_ACOSH):
9807 if (validate_arg (arg0, REAL_TYPE))
9808 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9809 &dconst1, NULL, true);
9810 break;
9812 CASE_FLT_FN (BUILT_IN_ATANH):
9813 if (validate_arg (arg0, REAL_TYPE))
9814 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9815 &dconstm1, &dconst1, false);
9816 break;
9818 CASE_FLT_FN (BUILT_IN_SIN):
9819 if (validate_arg (arg0, REAL_TYPE))
9820 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9821 break;
9823 CASE_FLT_FN (BUILT_IN_COS):
9824 return fold_builtin_cos (loc, arg0, type, fndecl);
9826 CASE_FLT_FN (BUILT_IN_TAN):
9827 return fold_builtin_tan (arg0, type);
9829 CASE_FLT_FN (BUILT_IN_CEXP):
9830 return fold_builtin_cexp (loc, arg0, type);
9832 CASE_FLT_FN (BUILT_IN_CEXPI):
9833 if (validate_arg (arg0, REAL_TYPE))
9834 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9835 break;
9837 CASE_FLT_FN (BUILT_IN_SINH):
9838 if (validate_arg (arg0, REAL_TYPE))
9839 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9840 break;
9842 CASE_FLT_FN (BUILT_IN_COSH):
9843 return fold_builtin_cosh (loc, arg0, type, fndecl);
9845 CASE_FLT_FN (BUILT_IN_TANH):
9846 if (validate_arg (arg0, REAL_TYPE))
9847 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9848 break;
9850 CASE_FLT_FN (BUILT_IN_ERF):
9851 if (validate_arg (arg0, REAL_TYPE))
9852 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9853 break;
9855 CASE_FLT_FN (BUILT_IN_ERFC):
9856 if (validate_arg (arg0, REAL_TYPE))
9857 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9858 break;
9860 CASE_FLT_FN (BUILT_IN_TGAMMA):
9861 if (validate_arg (arg0, REAL_TYPE))
9862 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9863 break;
9865 CASE_FLT_FN (BUILT_IN_EXP):
9866 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9868 CASE_FLT_FN (BUILT_IN_EXP2):
9869 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9871 CASE_FLT_FN (BUILT_IN_EXP10):
9872 CASE_FLT_FN (BUILT_IN_POW10):
9873 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9875 CASE_FLT_FN (BUILT_IN_EXPM1):
9876 if (validate_arg (arg0, REAL_TYPE))
9877 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9878 break;
9880 CASE_FLT_FN (BUILT_IN_LOG):
9881 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9883 CASE_FLT_FN (BUILT_IN_LOG2):
9884 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9886 CASE_FLT_FN (BUILT_IN_LOG10):
9887 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9889 CASE_FLT_FN (BUILT_IN_LOG1P):
9890 if (validate_arg (arg0, REAL_TYPE))
9891 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9892 &dconstm1, NULL, false);
9893 break;
9895 CASE_FLT_FN (BUILT_IN_J0):
9896 if (validate_arg (arg0, REAL_TYPE))
9897 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9898 NULL, NULL, 0);
9899 break;
9901 CASE_FLT_FN (BUILT_IN_J1):
9902 if (validate_arg (arg0, REAL_TYPE))
9903 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9904 NULL, NULL, 0);
9905 break;
9907 CASE_FLT_FN (BUILT_IN_Y0):
9908 if (validate_arg (arg0, REAL_TYPE))
9909 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9910 &dconst0, NULL, false);
9911 break;
9913 CASE_FLT_FN (BUILT_IN_Y1):
9914 if (validate_arg (arg0, REAL_TYPE))
9915 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9916 &dconst0, NULL, false);
9917 break;
9919 CASE_FLT_FN (BUILT_IN_NAN):
9920 case BUILT_IN_NAND32:
9921 case BUILT_IN_NAND64:
9922 case BUILT_IN_NAND128:
9923 return fold_builtin_nan (arg0, type, true);
9925 CASE_FLT_FN (BUILT_IN_NANS):
9926 return fold_builtin_nan (arg0, type, false);
9928 CASE_FLT_FN (BUILT_IN_FLOOR):
9929 return fold_builtin_floor (loc, fndecl, arg0);
9931 CASE_FLT_FN (BUILT_IN_CEIL):
9932 return fold_builtin_ceil (loc, fndecl, arg0);
9934 CASE_FLT_FN (BUILT_IN_TRUNC):
9935 return fold_builtin_trunc (loc, fndecl, arg0);
9937 CASE_FLT_FN (BUILT_IN_ROUND):
9938 return fold_builtin_round (loc, fndecl, arg0);
9940 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9941 CASE_FLT_FN (BUILT_IN_RINT):
9942 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9944 CASE_FLT_FN (BUILT_IN_LCEIL):
9945 CASE_FLT_FN (BUILT_IN_LLCEIL):
9946 CASE_FLT_FN (BUILT_IN_LFLOOR):
9947 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9948 CASE_FLT_FN (BUILT_IN_LROUND):
9949 CASE_FLT_FN (BUILT_IN_LLROUND):
9950 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9952 CASE_FLT_FN (BUILT_IN_LRINT):
9953 CASE_FLT_FN (BUILT_IN_LLRINT):
9954 return fold_fixed_mathfn (loc, fndecl, arg0);
9956 case BUILT_IN_BSWAP32:
9957 case BUILT_IN_BSWAP64:
9958 return fold_builtin_bswap (fndecl, arg0);
9960 CASE_INT_FN (BUILT_IN_FFS):
9961 CASE_INT_FN (BUILT_IN_CLZ):
9962 CASE_INT_FN (BUILT_IN_CTZ):
9963 CASE_INT_FN (BUILT_IN_POPCOUNT):
9964 CASE_INT_FN (BUILT_IN_PARITY):
9965 return fold_builtin_bitop (fndecl, arg0);
9967 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9968 return fold_builtin_signbit (loc, arg0, type);
9970 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9971 return fold_builtin_significand (loc, arg0, type);
9973 CASE_FLT_FN (BUILT_IN_ILOGB):
9974 CASE_FLT_FN (BUILT_IN_LOGB):
9975 return fold_builtin_logb (loc, arg0, type);
9977 case BUILT_IN_ISASCII:
9978 return fold_builtin_isascii (loc, arg0);
9980 case BUILT_IN_TOASCII:
9981 return fold_builtin_toascii (loc, arg0);
9983 case BUILT_IN_ISDIGIT:
9984 return fold_builtin_isdigit (loc, arg0);
9986 CASE_FLT_FN (BUILT_IN_FINITE):
9987 case BUILT_IN_FINITED32:
9988 case BUILT_IN_FINITED64:
9989 case BUILT_IN_FINITED128:
9990 case BUILT_IN_ISFINITE:
9992 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9993 if (ret)
9994 return ret;
9995 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9998 CASE_FLT_FN (BUILT_IN_ISINF):
9999 case BUILT_IN_ISINFD32:
10000 case BUILT_IN_ISINFD64:
10001 case BUILT_IN_ISINFD128:
10003 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10004 if (ret)
10005 return ret;
10006 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10009 case BUILT_IN_ISNORMAL:
10010 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10012 case BUILT_IN_ISINF_SIGN:
10013 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10015 CASE_FLT_FN (BUILT_IN_ISNAN):
10016 case BUILT_IN_ISNAND32:
10017 case BUILT_IN_ISNAND64:
10018 case BUILT_IN_ISNAND128:
10019 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10021 case BUILT_IN_PRINTF:
10022 case BUILT_IN_PRINTF_UNLOCKED:
10023 case BUILT_IN_VPRINTF:
10024 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10026 default:
10027 break;
10030 return NULL_TREE;
10034 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10035 IGNORE is true if the result of the function call is ignored. This
10036 function returns NULL_TREE if no simplification was possible. */
10038 static tree
10039 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10041 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10042 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10044 switch (fcode)
10046 CASE_FLT_FN (BUILT_IN_JN):
10047 if (validate_arg (arg0, INTEGER_TYPE)
10048 && validate_arg (arg1, REAL_TYPE))
10049 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10050 break;
10052 CASE_FLT_FN (BUILT_IN_YN):
10053 if (validate_arg (arg0, INTEGER_TYPE)
10054 && validate_arg (arg1, REAL_TYPE))
10055 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10056 &dconst0, false);
10057 break;
10059 CASE_FLT_FN (BUILT_IN_DREM):
10060 CASE_FLT_FN (BUILT_IN_REMAINDER):
10061 if (validate_arg (arg0, REAL_TYPE)
10062 && validate_arg(arg1, REAL_TYPE))
10063 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10064 break;
10066 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10067 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10068 if (validate_arg (arg0, REAL_TYPE)
10069 && validate_arg(arg1, POINTER_TYPE))
10070 return do_mpfr_lgamma_r (arg0, arg1, type);
10071 break;
10073 CASE_FLT_FN (BUILT_IN_ATAN2):
10074 if (validate_arg (arg0, REAL_TYPE)
10075 && validate_arg(arg1, REAL_TYPE))
10076 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10077 break;
10079 CASE_FLT_FN (BUILT_IN_FDIM):
10080 if (validate_arg (arg0, REAL_TYPE)
10081 && validate_arg(arg1, REAL_TYPE))
10082 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10083 break;
10085 CASE_FLT_FN (BUILT_IN_HYPOT):
10086 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10088 CASE_FLT_FN (BUILT_IN_CPOW):
10089 if (validate_arg (arg0, COMPLEX_TYPE)
10090 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10091 && validate_arg (arg1, COMPLEX_TYPE)
10092 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10093 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10094 break;
10096 CASE_FLT_FN (BUILT_IN_LDEXP):
10097 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10098 CASE_FLT_FN (BUILT_IN_SCALBN):
10099 CASE_FLT_FN (BUILT_IN_SCALBLN):
10100 return fold_builtin_load_exponent (loc, arg0, arg1,
10101 type, /*ldexp=*/false);
10103 CASE_FLT_FN (BUILT_IN_FREXP):
10104 return fold_builtin_frexp (loc, arg0, arg1, type);
10106 CASE_FLT_FN (BUILT_IN_MODF):
10107 return fold_builtin_modf (loc, arg0, arg1, type);
10109 case BUILT_IN_BZERO:
10110 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10112 case BUILT_IN_FPUTS:
10113 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10115 case BUILT_IN_FPUTS_UNLOCKED:
10116 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10118 case BUILT_IN_STRSTR:
10119 return fold_builtin_strstr (loc, arg0, arg1, type);
10121 case BUILT_IN_STRCAT:
10122 return fold_builtin_strcat (loc, arg0, arg1);
10124 case BUILT_IN_STRSPN:
10125 return fold_builtin_strspn (loc, arg0, arg1);
10127 case BUILT_IN_STRCSPN:
10128 return fold_builtin_strcspn (loc, arg0, arg1);
10130 case BUILT_IN_STRCHR:
10131 case BUILT_IN_INDEX:
10132 return fold_builtin_strchr (loc, arg0, arg1, type);
10134 case BUILT_IN_STRRCHR:
10135 case BUILT_IN_RINDEX:
10136 return fold_builtin_strrchr (loc, arg0, arg1, type);
10138 case BUILT_IN_STRCPY:
10139 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10141 case BUILT_IN_STPCPY:
10142 if (ignore)
10144 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10145 if (!fn)
10146 break;
10148 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10150 else
10151 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10152 break;
10154 case BUILT_IN_STRCMP:
10155 return fold_builtin_strcmp (loc, arg0, arg1);
10157 case BUILT_IN_STRPBRK:
10158 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10160 case BUILT_IN_EXPECT:
10161 return fold_builtin_expect (loc, arg0, arg1);
10163 CASE_FLT_FN (BUILT_IN_POW):
10164 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10166 CASE_FLT_FN (BUILT_IN_POWI):
10167 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10169 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10170 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10172 CASE_FLT_FN (BUILT_IN_FMIN):
10173 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10175 CASE_FLT_FN (BUILT_IN_FMAX):
10176 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10178 case BUILT_IN_ISGREATER:
10179 return fold_builtin_unordered_cmp (loc, fndecl,
10180 arg0, arg1, UNLE_EXPR, LE_EXPR);
10181 case BUILT_IN_ISGREATEREQUAL:
10182 return fold_builtin_unordered_cmp (loc, fndecl,
10183 arg0, arg1, UNLT_EXPR, LT_EXPR);
10184 case BUILT_IN_ISLESS:
10185 return fold_builtin_unordered_cmp (loc, fndecl,
10186 arg0, arg1, UNGE_EXPR, GE_EXPR);
10187 case BUILT_IN_ISLESSEQUAL:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNGT_EXPR, GT_EXPR);
10190 case BUILT_IN_ISLESSGREATER:
10191 return fold_builtin_unordered_cmp (loc, fndecl,
10192 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10193 case BUILT_IN_ISUNORDERED:
10194 return fold_builtin_unordered_cmp (loc, fndecl,
10195 arg0, arg1, UNORDERED_EXPR,
10196 NOP_EXPR);
10198 /* We do the folding for va_start in the expander. */
10199 case BUILT_IN_VA_START:
10200 break;
10202 case BUILT_IN_SPRINTF:
10203 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10205 case BUILT_IN_OBJECT_SIZE:
10206 return fold_builtin_object_size (arg0, arg1);
10208 case BUILT_IN_PRINTF:
10209 case BUILT_IN_PRINTF_UNLOCKED:
10210 case BUILT_IN_VPRINTF:
10211 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10213 case BUILT_IN_PRINTF_CHK:
10214 case BUILT_IN_VPRINTF_CHK:
10215 if (!validate_arg (arg0, INTEGER_TYPE)
10216 || TREE_SIDE_EFFECTS (arg0))
10217 return NULL_TREE;
10218 else
10219 return fold_builtin_printf (loc, fndecl,
10220 arg1, NULL_TREE, ignore, fcode);
10221 break;
10223 case BUILT_IN_FPRINTF:
10224 case BUILT_IN_FPRINTF_UNLOCKED:
10225 case BUILT_IN_VFPRINTF:
10226 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10227 ignore, fcode);
10229 default:
10230 break;
10232 return NULL_TREE;
10235 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10236 and ARG2. IGNORE is true if the result of the function call is ignored.
10237 This function returns NULL_TREE if no simplification was possible. */
10239 static tree
10240 fold_builtin_3 (location_t loc, tree fndecl,
10241 tree arg0, tree arg1, tree arg2, bool ignore)
10243 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10244 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10245 switch (fcode)
10248 CASE_FLT_FN (BUILT_IN_SINCOS):
10249 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10251 CASE_FLT_FN (BUILT_IN_FMA):
10252 if (validate_arg (arg0, REAL_TYPE)
10253 && validate_arg(arg1, REAL_TYPE)
10254 && validate_arg(arg2, REAL_TYPE))
10255 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10256 break;
10258 CASE_FLT_FN (BUILT_IN_REMQUO):
10259 if (validate_arg (arg0, REAL_TYPE)
10260 && validate_arg(arg1, REAL_TYPE)
10261 && validate_arg(arg2, POINTER_TYPE))
10262 return do_mpfr_remquo (arg0, arg1, arg2);
10263 break;
10265 case BUILT_IN_MEMSET:
10266 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10268 case BUILT_IN_BCOPY:
10269 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10270 void_type_node, true, /*endp=*/3);
10272 case BUILT_IN_MEMCPY:
10273 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10274 type, ignore, /*endp=*/0);
10276 case BUILT_IN_MEMPCPY:
10277 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10278 type, ignore, /*endp=*/1);
10280 case BUILT_IN_MEMMOVE:
10281 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10282 type, ignore, /*endp=*/3);
10284 case BUILT_IN_STRNCAT:
10285 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10287 case BUILT_IN_STRNCPY:
10288 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10290 case BUILT_IN_STRNCMP:
10291 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10293 case BUILT_IN_MEMCHR:
10294 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10296 case BUILT_IN_BCMP:
10297 case BUILT_IN_MEMCMP:
10298 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10300 case BUILT_IN_SPRINTF:
10301 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10303 case BUILT_IN_STRCPY_CHK:
10304 case BUILT_IN_STPCPY_CHK:
10305 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10306 ignore, fcode);
10308 case BUILT_IN_STRCAT_CHK:
10309 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10311 case BUILT_IN_PRINTF_CHK:
10312 case BUILT_IN_VPRINTF_CHK:
10313 if (!validate_arg (arg0, INTEGER_TYPE)
10314 || TREE_SIDE_EFFECTS (arg0))
10315 return NULL_TREE;
10316 else
10317 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10318 break;
10320 case BUILT_IN_FPRINTF:
10321 case BUILT_IN_FPRINTF_UNLOCKED:
10322 case BUILT_IN_VFPRINTF:
10323 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10324 ignore, fcode);
10326 case BUILT_IN_FPRINTF_CHK:
10327 case BUILT_IN_VFPRINTF_CHK:
10328 if (!validate_arg (arg1, INTEGER_TYPE)
10329 || TREE_SIDE_EFFECTS (arg1))
10330 return NULL_TREE;
10331 else
10332 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10333 ignore, fcode);
10335 default:
10336 break;
10338 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
                                      NULL_TREE, ignore,
                                      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* The __fprintf_chk flag argument must be a side-effect-free
         integer, otherwise the call cannot be folded.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
          || TREE_SIDE_EFFECTS (arg1))
        return NULL_TREE;
      else
        return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
                                     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  /* Dispatch on the arity of the call.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
                            ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the result in a NOP_EXPR with TREE_NO_WARNING set so that
         removing the original call does not later trigger bogus
         "statement without effect" style warnings.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
                      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Same wrapping as fold_builtin_n: suppress spurious warnings
         about the now-removed original call.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10473 /* Return true if FNDECL shouldn't be folded right now.
10474 If a built-in function has an inline attribute always_inline
10475 wrapper, defer folding it after always_inline functions have
10476 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10477 might not be performed. */
10479 static bool
10480 avoid_folding_inline_builtin (tree fndecl)
10482 return (DECL_DECLARED_INLINE_P (fndecl)
10483 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10484 && cfun
10485 && !cfun->always_inline_functions_inlined
10486 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead last argument is __builtin_va_arg_pack ().  Defer folding
         even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      /* Folding always_inline fortify wrappers too early would defeat
         -D_FORTIFY_SOURCE checking; see avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
          return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
        {
          /* Try fixed-arity folders first, then the varargs folders.  */
          if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              tree *args = CALL_EXPR_ARGP (exp);
              ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
            }
          if (!ret)
            ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}
10543 /* Conveniently construct a function call expression. FNDECL names the
10544 function to be called and ARGLIST is a TREE_LIST of arguments. */
10546 tree
10547 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10549 tree fntype = TREE_TYPE (fndecl);
10550 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10551 int n = list_length (arglist);
10552 tree *argarray = (tree *) alloca (n * sizeof (tree));
10553 int i;
10555 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10556 argarray[i] = TREE_VALUE (arglist);
10557 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10560 /* Conveniently construct a function call expression. FNDECL names the
10561 function to be called, N is the number of arguments, and the "..."
10562 parameters are the argument expressions. */
10564 tree
10565 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10567 va_list ap;
10568 tree fntype = TREE_TYPE (fndecl);
10569 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10570 tree *argarray = (tree *) alloca (n * sizeof (tree));
10571 int i;
10573 va_start (ap, n);
10574 for (i = 0; i < n; i++)
10575 argarray[i] = va_arg (ap, tree);
10576 va_end (ap);
10577 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
                         tree fn,
                         int n,
                         tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
        {
          /* If last argument is __builtin_va_arg_pack (), arguments to this
             function are not finalized yet.  Defer folding until they are.  */
          if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
            {
              tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
              if (fndecl2
                  && TREE_CODE (fndecl2) == FUNCTION_DECL
                  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
                  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
                return build_call_array_loc (loc, type, fn, n, argarray);
            }
          /* Don't fold fortify-style always_inline wrappers yet.  */
          if (avoid_folding_inline_builtin (fndecl))
            return build_call_array_loc (loc, type, fn, n, argarray);
          if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
            {
              /* The target hook still takes a TREE_LIST, so cons one up
                 from the array (in reverse to preserve order).  */
              tree arglist = NULL_TREE;
              for (i = n - 1; i >= 0; i--)
                arglist = tree_cons (NULL_TREE, argarray[i], arglist);
              ret = targetm.fold_builtin (fndecl, arglist, false);
              if (ret)
                return ret;
              return build_call_array_loc (loc, type, fn, n, argarray);
            }
          else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
            {
              /* First try the transformations that don't require consing up
                 an exp.  */
              ret = fold_builtin_n (loc, fndecl, argarray, n, false);
              if (ret)
                return ret;
            }

          /* If we got this far, we need to build an exp.  */
          exp = build_call_array_loc (loc, type, fn, n, argarray);
          ret = fold_builtin_varargs (loc, fndecl, exp, false);
          return ret ? ret : exp;
        }
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* Copy the N new arguments, then the surviving tail of EXP's
         arguments, into a fresh stack buffer.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's own argument vector;
       build_call_array_loc copies from it, so no buffer is needed.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10674 /* Validate a single argument ARG against a tree code CODE representing
10675 a type. */
10677 static bool
10678 validate_arg (const_tree arg, enum tree_code code)
10680 if (!arg)
10681 return false;
10682 else if (code == POINTER_TYPE)
10683 return POINTER_TYPE_P (TREE_TYPE (arg));
10684 else if (code == INTEGER_TYPE)
10685 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10686 return code == TREE_CODE (TREE_TYPE (arg));
10689 /* This function validates the types of a function call argument list
10690 against a specified list of tree_codes. If the last specifier is a 0,
10691 that represents an ellipses, otherwise the last specifier must be a
10692 VOID_TYPE.
10694 This is the GIMPLE version of validate_arglist. Eventually we want to
10695 completely convert builtins.c to work from GIMPLEs and the tree based
10696 validate_arglist will then be removed. */
10698 bool
10699 validate_gimple_arglist (const_gimple call, ...)
10701 enum tree_code code;
10702 bool res = 0;
10703 va_list ap;
10704 const_tree arg;
10705 size_t i;
10707 va_start (ap, call);
10708 i = 0;
10712 code = (enum tree_code) va_arg (ap, int);
10713 switch (code)
10715 case 0:
10716 /* This signifies an ellipses, any further arguments are all ok. */
10717 res = true;
10718 goto end;
10719 case VOID_TYPE:
10720 /* This signifies an endlink, if no arguments remain, return
10721 true, otherwise return false. */
10722 res = (i == gimple_call_num_args (call));
10723 goto end;
10724 default:
10725 /* If no parameters remain or the parameter's code does not
10726 match the specified code, return false. Otherwise continue
10727 checking any remaining arguments. */
10728 arg = gimple_call_arg (call, i++);
10729 if (!validate_arg (arg, code))
10730 goto end;
10731 break;
10734 while (1);
10736 /* We need gotos here since we can only have one VA_CLOSE in a
10737 function. */
10738 end: ;
10739 va_end (ap);
10741 return res;
10744 /* This function validates the types of a function call argument list
10745 against a specified list of tree_codes. If the last specifier is a 0,
10746 that represents an ellipses, otherwise the last specifier must be a
10747 VOID_TYPE. */
10749 bool
10750 validate_arglist (const_tree callexpr, ...)
10752 enum tree_code code;
10753 bool res = 0;
10754 va_list ap;
10755 const_call_expr_arg_iterator iter;
10756 const_tree arg;
10758 va_start (ap, callexpr);
10759 init_const_call_expr_arg_iterator (callexpr, &iter);
10763 code = (enum tree_code) va_arg (ap, int);
10764 switch (code)
10766 case 0:
10767 /* This signifies an ellipses, any further arguments are all ok. */
10768 res = true;
10769 goto end;
10770 case VOID_TYPE:
10771 /* This signifies an endlink, if no arguments remain, return
10772 true, otherwise return false. */
10773 res = !more_const_call_expr_args_p (&iter);
10774 goto end;
10775 default:
10776 /* If no parameters remain or the parameter's code does not
10777 match the specified code, return false. Otherwise continue
10778 checking any remaining arguments. */
10779 arg = next_const_call_expr_arg (&iter);
10780 if (!validate_arg (arg, code))
10781 goto end;
10782 break;
10785 while (1);
10787 /* We need gotos here since we can only have one VA_CLOSE in a
10788 function. */
10789 end: ;
10790 va_end (ap);
10792 return res;
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX tells the caller no target-specific expansion
     is available; targets override this hook to do real work.  */
  return NULL_RTX;
}
10807 /* Returns true is EXP represents data that would potentially reside
10808 in a readonly section. */
10810 static bool
10811 readonly_data_expr (tree exp)
10813 STRIP_NOPS (exp);
10815 if (TREE_CODE (exp) != ADDR_EXPR)
10816 return false;
10818 exp = get_base_address (TREE_OPERAND (exp, 0));
10819 if (!exp)
10820 return false;
10822 /* Make sure we call decl_readonly_section only for trees it
10823 can handle (since it returns true for everything it doesn't
10824 understand). */
10825 if (TREE_CODE (exp) == STRING_CST
10826 || TREE_CODE (exp) == CONSTRUCTOR
10827 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10828 return decl_readonly_section (exp, 0);
10829 else
10830 return false;
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The needle must be a compile-time constant string for any
         folding to be possible.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          /* Both strings constant: perform the search at compile time.  */
          const char *r = strstr (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      /* The argument is const char *, and the result is char *, so we need
         a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
        return fold_convert_loc (loc, type, s1);

      /* Only a single-character needle can become a strchr call.  */
      if (p2[1] != '\0')
        return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The character to search for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          /* Convert the tree constant to a host char; punt if it does
             not fit the target character set.  */
          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character to search for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return NULL_TREE;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      /* For a non-constant string, only searching for '\0' can be
         simplified (to strchr, which may have a cheaper expansion).  */
      if (! integer_zerop (s2))
        return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* The accept set must be a compile-time constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          /* Both strings constant: do the search at compile time.  */
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
                                 s1, size_int (r - p1));
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
        return dst;

      if (optimize_insn_for_speed_p ())
        {
          /* See if we can store by pieces into (dst + strlen(dst)).  */
          tree newdst, call;
          tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
          tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

          if (!strlen_fn || !strcpy_fn)
            return NULL_TREE;

          /* If we don't have a movstr we don't want to emit an strcpy
             call.  We have to do that if the length of the source string
             isn't computable (in that case we can use memcpy probably
             later expanding to a sequence of mov instructions).  If we
             have movstr instructions we can emit strcpy calls.  */
          if (!HAVE_movstr)
            {
              tree len = c_strlen (src, 1);
              if (! len || TREE_SIDE_EFFECTS (len))
                return NULL_TREE;
            }

          /* Stabilize the argument list.  */
          dst = builtin_save_expr (dst);

          /* Create strlen (dst).  */
          newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
          /* Create (dst p+ strlen (dst)).  */

          newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dst), dst, newdst);
          newdst = builtin_save_expr (newdst);

          /* Result is strcpy (dst + strlen (dst), src), then yield DST.  */
          call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
          return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
        }
      return NULL_TREE;
    }
}
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
         length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
        return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
         length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
          && compare_tree_int (len, strlen (p)) >= 0)
        {
          tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 2, dst, src);
        }
      return NULL_TREE;
    }
}
11209 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11210 to the call.
11212 Return NULL_TREE if no simplification was possible, otherwise return the
11213 simplified form of the call as a tree.
11215 The simplified form may be a constant or other expression which
11216 computes the same value, but in a more efficient manner (including
11217 calls to other builtin functions).
11219 The call may contain arguments which need to be evaluated, but
11220 which are not useful to determine the result of the call. In
11221 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11222 COMPOUND_EXPR will be an argument which must be evaluated.
11223 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11224 COMPOUND_EXPR in the chain will contain the tree for the simplified
11225 form of the builtin function call. */
11227 static tree
11228 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11230 if (!validate_arg (s1, POINTER_TYPE)
11231 || !validate_arg (s2, POINTER_TYPE))
11232 return NULL_TREE;
11233 else
11235 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11237 /* If both arguments are constants, evaluate at compile-time. */
11238 if (p1 && p2)
11240 const size_t r = strspn (p1, p2);
11241 return size_int (r);
11244 /* If either argument is "", return NULL_TREE. */
11245 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11246 /* Evaluate and ignore both arguments in case either one has
11247 side-effects. */
11248 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11249 s1, s2);
11250 return NULL_TREE;
11254 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11255 to the call.
11257 Return NULL_TREE if no simplification was possible, otherwise return the
11258 simplified form of the call as a tree.
11260 The simplified form may be a constant or other expression which
11261 computes the same value, but in a more efficient manner (including
11262 calls to other builtin functions).
11264 The call may contain arguments which need to be evaluated, but
11265 which are not useful to determine the result of the call. In
11266 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11267 COMPOUND_EXPR will be an argument which must be evaluated.
11268 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11269 COMPOUND_EXPR in the chain will contain the tree for the simplified
11270 form of the builtin function call. */
static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  /* Both arguments must be pointers (to strings); otherwise punt.  */
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* c_getstr returns the constant string behind the argument, or
	 NULL when the argument is not a string literal.  */
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
	{
	  const size_t r = strcspn (p1, p2);
	  return size_int (r);
	}

      /* If the first argument is "", the result is necessarily zero.  */
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", strcspn (s1, "") == strlen (s1),
	 so transform into __builtin_strlen (s1).  */
      if (p2 && *p2 == '\0')
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      /* No simplification possible.  */
      return NULL_TREE;
    }
}
11314 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11318 the known length of the string. Return NULL_TREE if no simplification
11319 was possible. */
11321 tree
11322 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11323 bool ignore, bool unlocked, tree len)
11325 /* If we're using an unlocked function, assume the other unlocked
11326 functions exist explicitly. */
11327 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11328 : implicit_built_in_decls[BUILT_IN_FPUTC];
11329 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11330 : implicit_built_in_decls[BUILT_IN_FWRITE];
11332 /* If the return value is used, don't do the transformation. */
11333 if (!ignore)
11334 return NULL_TREE;
11336 /* Verify the arguments in the original call. */
11337 if (!validate_arg (arg0, POINTER_TYPE)
11338 || !validate_arg (arg1, POINTER_TYPE))
11339 return NULL_TREE;
11341 if (! len)
11342 len = c_strlen (arg0, 0);
11344 /* Get the length of the string passed to fputs. If the length
11345 can't be determined, punt. */
11346 if (!len
11347 || TREE_CODE (len) != INTEGER_CST)
11348 return NULL_TREE;
11350 switch (compare_tree_int (len, 1))
11352 case -1: /* length is 0, delete the call entirely . */
11353 return omit_one_operand_loc (loc, integer_type_node,
11354 integer_zero_node, arg1);;
11356 case 0: /* length is 1, call fputc. */
11358 const char *p = c_getstr (arg0);
11360 if (p != NULL)
11362 if (fn_fputc)
11363 return build_call_expr_loc (loc, fn_fputc, 2,
11364 build_int_cst (NULL_TREE, p[0]), arg1);
11365 else
11366 return NULL_TREE;
11369 /* FALLTHROUGH */
11370 case 1: /* length is greater than 1, call fwrite. */
11372 /* If optimizing for size keep fputs. */
11373 if (optimize_function_for_size_p (cfun))
11374 return NULL_TREE;
11375 /* New argument list transforming fputs(string, stream) to
11376 fwrite(string, 1, len, stream). */
11377 if (fn_fwrite)
11378 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11379 size_one_node, len, arg1);
11380 else
11381 return NULL_TREE;
11383 default:
11384 gcc_unreachable ();
11386 return NULL_TREE;
11389 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11390 produced. False otherwise. This is done so that we don't output the error
11391 or warning twice or three times. */
11393 bool
11394 fold_builtin_next_arg (tree exp, bool va_start_p)
11396 tree fntype = TREE_TYPE (current_function_decl);
11397 int nargs = call_expr_nargs (exp);
11398 tree arg;
11400 if (TYPE_ARG_TYPES (fntype) == 0
11401 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11402 == void_type_node))
11404 error ("%<va_start%> used in function with fixed args");
11405 return true;
11408 if (va_start_p)
11410 if (va_start_p && (nargs != 2))
11412 error ("wrong number of arguments to function %<va_start%>");
11413 return true;
11415 arg = CALL_EXPR_ARG (exp, 1);
11417 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11418 when we checked the arguments and if needed issued a warning. */
11419 else
11421 if (nargs == 0)
11423 /* Evidently an out of date version of <stdarg.h>; can't validate
11424 va_start's second argument, but can still work as intended. */
11425 warning (0, "%<__builtin_next_arg%> called without an argument");
11426 return true;
11428 else if (nargs > 1)
11430 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11431 return true;
11433 arg = CALL_EXPR_ARG (exp, 0);
11436 if (TREE_CODE (arg) == SSA_NAME)
11437 arg = SSA_NAME_VAR (arg);
11439 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11440 or __builtin_next_arg (0) the first time we see it, after checking
11441 the arguments and if needed issuing a warning. */
11442 if (!integer_zerop (arg))
11444 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11446 /* Strip off all nops for the sake of the comparison. This
11447 is not quite the same as STRIP_NOPS. It does more.
11448 We must also strip off INDIRECT_EXPR for C++ reference
11449 parameters. */
11450 while (CONVERT_EXPR_P (arg)
11451 || TREE_CODE (arg) == INDIRECT_REF)
11452 arg = TREE_OPERAND (arg, 0);
11453 if (arg != last_parm)
11455 /* FIXME: Sometimes with the tree optimizers we can get the
11456 not the last argument even though the user used the last
11457 argument. We just warn and set the arg to be the last
11458 argument so that we will get wrong-code because of
11459 it. */
11460 warning (0, "second parameter of %<va_start%> not last named argument");
11463 /* Undefined by C99 7.15.1.4p4 (va_start):
11464 "If the parameter parmN is declared with the register storage
11465 class, with a function or array type, or with a type that is
11466 not compatible with the type that results after application of
11467 the default argument promotions, the behavior is undefined."
11469 else if (DECL_REGISTER (arg))
11470 warning (0, "undefined behaviour when second parameter of "
11471 "%<va_start%> is declared with %<register%> storage");
11473 /* We want to verify the second parameter just once before the tree
11474 optimizers are run and then avoid keeping it in the tree,
11475 as otherwise we could warn even for correct code like:
11476 void foo (int i, ...)
11477 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11478 if (va_start_p)
11479 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11480 else
11481 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11483 return false;
11487 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11488 ORIG may be null if this is a 2-argument call. We don't attempt to
11489 simplify calls with more than 3 arguments.
11491 Return NULL_TREE if no simplification was possible, otherwise return the
11492 simplified form of the call as a tree. If IGNORED is true, it means that
11493 the caller does not use the returned value of the function. */
11495 static tree
11496 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11497 tree orig, int ignored)
11499 tree call, retval;
11500 const char *fmt_str = NULL;
11502 /* Verify the required arguments in the original call. We deal with two
11503 types of sprintf() calls: 'sprintf (str, fmt)' and
11504 'sprintf (dest, "%s", orig)'. */
11505 if (!validate_arg (dest, POINTER_TYPE)
11506 || !validate_arg (fmt, POINTER_TYPE))
11507 return NULL_TREE;
11508 if (orig && !validate_arg (orig, POINTER_TYPE))
11509 return NULL_TREE;
11511 /* Check whether the format is a literal string constant. */
11512 fmt_str = c_getstr (fmt);
11513 if (fmt_str == NULL)
11514 return NULL_TREE;
11516 call = NULL_TREE;
11517 retval = NULL_TREE;
11519 if (!init_target_chars ())
11520 return NULL_TREE;
11522 /* If the format doesn't contain % args or %%, use strcpy. */
11523 if (strchr (fmt_str, target_percent) == NULL)
11525 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11527 if (!fn)
11528 return NULL_TREE;
11530 /* Don't optimize sprintf (buf, "abc", ptr++). */
11531 if (orig)
11532 return NULL_TREE;
11534 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11535 'format' is known to contain no % formats. */
11536 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11537 if (!ignored)
11538 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11541 /* If the format is "%s", use strcpy if the result isn't used. */
11542 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11544 tree fn;
11545 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11547 if (!fn)
11548 return NULL_TREE;
11550 /* Don't crash on sprintf (str1, "%s"). */
11551 if (!orig)
11552 return NULL_TREE;
11554 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11555 if (!ignored)
11557 retval = c_strlen (orig, 1);
11558 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11559 return NULL_TREE;
11561 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11564 if (call && retval)
11566 retval = fold_convert_loc
11567 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11568 retval);
11569 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11571 else
11572 return call;
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  /* __builtin_object_size (ptr, type): exactly one pointer and one
     integer argument.  Invalid calls trap at runtime.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  /* The second argument must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_low_cst (ost, 0);

  /* By this point the size could not be determined, so return the
     "unknown" value: (size_t) -1 for types 0/1, 0 for types 2/3.  */
  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
11610 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11611 FCODE is the BUILT_IN_* to use.
11612 Return NULL_RTX if we failed; the caller should emit a normal call,
11613 otherwise try to get the result in TARGET, if convenient (and in
11614 mode MODE if that's convenient). */
static rtx
expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer for memset, a pointer for the
     copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* SIZE (the object size) must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_RTX;

  if (host_integerp (len, 1) || integer_all_onesp (size))
    {
      tree fn;

      /* SIZE == (size_t) -1 means "unknown"; otherwise a constant LEN
	 larger than SIZE is a guaranteed overflow.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMCPY];
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = built_in_decls[BUILT_IN_MEMPCPY];
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = built_in_decls[BUILT_IN_MEMMOVE];
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = built_in_decls[BUILT_IN_MEMSET];
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Replace the _chk call with the unchecked variant, preserving
	 the tail-call flag.  */
      fn = build_call_nofold (fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN rather than DEST.  */
	  expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align
	    = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold (fn, 4, dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11731 /* Emit warning if a buffer overflow is detected at compile time. */
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  /* Pick out which call arguments hold the length being written and
     the destination object size, depending on the builtin.  */
  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  /* SIZE == (size_t) -1 means the object size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      /* For str[p]cpy/strcat, LEN is the source string; the bytes
	 written are strlen (LEN) + 1.  */
      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))
	{
	  /* Source length unknown: overflow is possible but not
	     certain, so use the weaker "might overflow" wording.  */
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
11799 /* Emit warning if a buffer overflow is detected at compile time
11800 in __sprintf_chk/__vsprintf_chk calls. */
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* SIZE == (size_t) -1 means the object size is unknown.  */
  if (! host_integerp (size, 1) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! host_integerp (len, 1))
	return;
    }
  else
    return;

  /* LEN counts characters; the terminating NUL takes one more byte,
     so LEN >= SIZE is a certain overflow.  */
  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
11856 /* Emit warning if a free is called with address of a variable. */
static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  /* Only &object arguments can be proven non-heap here.  */
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  /* An INDIRECT_REF base may still point into the heap; say nothing.  */
  if (arg == NULL || INDIRECT_REF_P (arg))
    return;

  /* Name the variable in the diagnostic when we have a decl/SSA var.  */
  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp),
		0, "%Kattempt to free a non-heap object", exp);
}
11879 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11880 if possible. */
tree
fold_builtin_object_size (tree ptr, tree ost)
{
  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  /* The second argument must be a constant in [0, 3].  */
  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
			  compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
					     ? -1 : 0))
	ret = build_int_cstu (size_type_node, bytes);
    }

  if (ret)
    {
      /* Don't fold if the computed size doesn't fit in size_t.  */
      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
	ret = NULL_TREE;
    }

  return ret;
}
11935 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11936 DEST, SRC, LEN, and SIZE are the arguments to the call.
11937 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11938 code of the builtin. If MAXLEN is not NULL, it is maximum length
11939 passed as third argument. */
tree
fold_builtin_memory_chk (location_t loc, tree fndecl,
			 tree dest, tree src, tree len, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree fn;

  /* The second argument is an integer for memset, a pointer for the
     copy/move variants.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
			(fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				     dest, len);
      else
	{
	  tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
				       dest, len);
	  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
	}
    }

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
		}
	      return NULL_TREE;
	    }
	}
      else
	maxlen = len;

      /* If the bound can exceed the object size, keep the checked call.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];
      break;
    default:
      break;
    }

  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12031 /* Fold a call to the __st[rp]cpy_chk builtin.
12032 DEST, SRC, and SIZE are the arguments to the call.
12033 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12034 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12035 strings passed as second argument. */
tree
fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size,
			 tree maxlen, bool ignore,
			 enum built_in_function fcode)
{
  tree len, fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return NULL_TREE;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = built_in_decls[BUILT_IN_STRCPY_CHK];
		  if (!fn)
		    return NULL_TREE;

		  return build_call_expr_loc (loc, fn, 3, dest, src, size);
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
	      if (!fn)
		return NULL_TREE;

	      /* Copy LEN + 1 bytes to include the terminating NUL.  */
	      len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
	      return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
				       build_call_expr_loc (loc, fn, 4,
							    dest, src, len, size));
	    }
	}
      else
	maxlen = len;

      /* MAXLEN counts characters; the NUL needs one more byte, so
	 MAXLEN >= SIZE means the copy may not fit.  */
      if (! tree_int_cst_lt (maxlen, size))
	return NULL_TREE;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
		      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12112 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12113 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12114 length passed as third argument. */
tree
fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
			  tree len, tree size, tree maxlen)
{
  tree fn;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* If the bound can exceed the object size, keep the checked call.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  /* If __builtin_strncpy_chk is used, assume strncpy is available.  */
  fn = built_in_decls[BUILT_IN_STRNCPY];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 3, dest, src, len);
}
12156 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12157 are the arguments to the call. */
static tree
fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
			 tree src, tree size)
{
  tree fn;
  const char *p;

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST (still evaluating SRC for
     side-effects).  */
  if (p && *p == '\0')
    return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* Only drop the check when SIZE == (size_t) -1, i.e. the object size
     is unknown and the check would always pass.  */
  if (! host_integerp (size, 1) || ! integer_all_onesp (size))
    return NULL_TREE;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = built_in_decls[BUILT_IN_STRCAT];
  if (!fn)
    return NULL_TREE;

  return build_call_expr_loc (loc, fn, 2, dest, src);
}
12187 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12188 LEN, and SIZE. */
12190 static tree
12191 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12192 tree dest, tree src, tree len, tree size)
12194 tree fn;
12195 const char *p;
12197 if (!validate_arg (dest, POINTER_TYPE)
12198 || !validate_arg (src, POINTER_TYPE)
12199 || !validate_arg (size, INTEGER_TYPE)
12200 || !validate_arg (size, INTEGER_TYPE))
12201 return NULL_TREE;
12203 p = c_getstr (src);
12204 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12205 if (p && *p == '\0')
12206 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12207 else if (integer_zerop (len))
12208 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12210 if (! host_integerp (size, 1))
12211 return NULL_TREE;
12213 if (! integer_all_onesp (size))
12215 tree src_len = c_strlen (src, 1);
12216 if (src_len
12217 && host_integerp (src_len, 1)
12218 && host_integerp (len, 1)
12219 && ! tree_int_cst_lt (len, src_len))
12221 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12222 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12223 if (!fn)
12224 return NULL_TREE;
12226 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12228 return NULL_TREE;
12231 /* If __builtin_strncat_chk is used, assume strncat is available. */
12232 fn = built_in_decls[BUILT_IN_STRNCAT];
12233 if (!fn)
12234 return NULL_TREE;
12236 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12239 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12240 a normal call should be emitted rather than expanding the function
12241 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
static tree
fold_builtin_sprintf_chk (location_t loc, tree exp,
			  enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call:
     __sprintf_chk (dest, flag, size, fmt, ...).  */
  if (nargs < 4)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  len = NULL_TREE;

  if (!init_target_chars ())
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  /* For vsprintf_chk extra trailing args are fine; for
	     sprintf_chk require exactly the 4 fixed arguments.  */
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = CALL_EXPR_ARG (exp, 4);
	      if (validate_arg (arg, POINTER_TYPE))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! host_integerp (len, 1))
		    len = NULL_TREE;
		}
	    }
	}
    }

  /* SIZE == (size_t) -1 means "unknown": the check always passes.
     Otherwise require the known output length to fit (LEN < SIZE
     leaves room for the terminating NUL).  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return NULL_TREE;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
		      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keeping dest and fmt (plus any
     variadic tail).  */
  return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
}
12331 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12332 a normal call should be emitted rather than expanding the function
12333 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12334 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12335 passed as second argument. */
tree
fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
			   enum built_in_function fcode)
{
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call:
     __snprintf_chk (dest, len, flag, size, fmt, ...).  */
  if (call_expr_nargs (exp) < 5)
    return NULL_TREE;
  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))
    return NULL_TREE;
  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))
    return NULL_TREE;
  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;
  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* The object size must be a known constant to reason about.  */
  if (! host_integerp (size, 1))
    return NULL_TREE;

  /* SIZE == (size_t) -1 means "unknown": the check always passes.  */
  if (! integer_all_onesp (size))
    {
      if (! host_integerp (len, 1))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
	    return NULL_TREE;
	}
      else
	maxlen = len;

      /* If the bound can exceed the object size, keep the checked call.  */
      if (tree_int_cst_lt (size, maxlen))
	return NULL_TREE;
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return NULL_TREE;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return NULL_TREE;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
		      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
  if (!fn)
    return NULL_TREE;

  /* Drop the flag and size arguments, keeping dest, len and fmt (plus
     any variadic tail).  */
  return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
		     tree arg, bool ignore,
		     enum built_in_function fcode)
{
  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
    }
  else
    {
      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* The format is either exactly "%s" or contains no '%' at all;
     either way the output is a single known string STR.  */
  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  /* va_list variants carry no foldable string argument.  */
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return NULL_TREE;

	  if (!arg || !validate_arg (arg, POINTER_TYPE))
	    return NULL_TREE;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return NULL_TREE;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return NULL_TREE;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (NULL_TREE, str[0]);
	  if (fn_putchar)
	    call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline)
	    {
	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      char *newstr = XALLOCAVEC (char, len);
	      memcpy (newstr, str, len - 1);
	      newstr[len - 1] = 0;

	      newarg = build_string_literal (len, newstr);
	      if (fn_puts)
		call = build_call_expr_loc (loc, fn_puts, 1, newarg);
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return NULL_TREE;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_puts)
	call = build_call_expr_loc (loc, fn_puts, 1, arg);
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_putchar)
	call = build_call_expr_loc (loc, fn_putchar, 1, arg);
    }

  /* CALL stays NULL_TREE when the replacement builtin was unavailable.  */
  if (!call)
    return NULL_TREE;

  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static tree
fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
		      tree fmt, tree arg, bool ignore,
		      enum built_in_function fcode)
{
  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (! ignore)
    return NULL_TREE;

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))
    return NULL_TREE;
  if (!validate_arg (fmt, POINTER_TYPE))
    return NULL_TREE;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
    }
  else
    {
      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
    }

  if (!init_target_chars ())
    return NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* A non-va_list call with a trailing ARG cannot match a
	 %-free format; leave it alone.  */
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return NULL_TREE;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  /* If FP has side-effects, just wait until gimplification is
	     done.  */
	  if (TREE_SIDE_EFFECTS (fp))
	    return NULL_TREE;

	  return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return NULL_TREE;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || !validate_arg (arg, POINTER_TYPE))
	return NULL_TREE;
      if (fn_fputs)
	call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || !validate_arg (arg, INTEGER_TYPE))
	return NULL_TREE;
      if (fn_fputc)
	call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
    }

  /* CALL stays NULL_TREE when the replacement builtin was unavailable.  */
  if (!call)
    return NULL_TREE;
  return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
}
12643 /* Initialize format string characters in the target charset. */
12645 static bool
12646 init_target_chars (void)
12648 static bool init;
12649 if (!init)
12651 target_newline = lang_hooks.to_target_charset ('\n');
12652 target_percent = lang_hooks.to_target_charset ('%');
12653 target_c = lang_hooks.to_target_charset ('c');
12654 target_s = lang_hooks.to_target_charset ('s');
12655 if (target_newline == 0 || target_percent == 0 || target_c == 0
12656 || target_s == 0)
12657 return false;
12659 target_percent_c[0] = target_percent;
12660 target_percent_c[1] = target_c;
12661 target_percent_c[2] = '\0';
12663 target_percent_s[0] = target_percent;
12664 target_percent_s[1] = target_s;
12665 target_percent_s[2] = '\0';
12667 target_percent_s_newline[0] = target_percent;
12668 target_percent_s_newline[1] = target_s;
12669 target_percent_s_newline[2] = target_newline;
12670 target_percent_s_newline[3] = '\0';
12672 init = true;
12674 return true;
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  /* Round-trip through the target mode to make sure the value
	     is exactly representable there.  */
	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* TYPE is a complex type; its TREE_TYPE is the component
	 real type used for each part.  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      /* Enforce the optional MIN/MAX domain bounds on the argument.  */
      if (real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  /* Clear the MPFR exception flags immediately before calling
	     FUNC so do_mpfr_ckconv can inspect them afterwards.  */
	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the two-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (real_isfinite (ra1) && real_isfinite (ra2))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2;

	  /* Flags must be cleared right before FUNC runs so that
	     do_mpfr_ckconv sees only the flags FUNC itself raised.  */
	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call the three-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   The mpfr precision is set to the precision of TYPE.  We assume that
   function FUNC returns zero if the result could be calculated
   exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  /* Flags must be cleared right before FUNC runs so that
	     do_mpfr_ckconv sees only the flags FUNC itself raised.  */
	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_from_real (m2, ra2, GMP_RNDN);
	  mpfr_from_real (m3, ra3, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, rnd);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  /* Compute both results with one mpfr_sin_cos call; each is
	     then converted/validated independently.  */
	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, rnd);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer type were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));
		}
	    }
	}
    }

  return result;
}
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
		  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
		  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      /* N must round-trip through long because FUNC takes the order
	 as a long; also enforce the optional MIN bound on ARG2.  */
      if (n == (long)n
	  && real_isfinite (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = func (m, n, m, rnd);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo = fold_build2 (MODIFY_EXPR,
						 TREE_TYPE (arg_quo), arg_quo,
						 build_int_cst (NULL, integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }

  return result;
}
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (NULL, sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
   function FUNC on it and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      if (real_isfinite (re) && real_isfinite (im))
	{
	  /* Precision and rounding come from the component real type
	     of the (complex) result TYPE.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m;

	  mpc_init2 (m, prec);
	  mpfr_from_real (mpc_realref(m), re, rnd);
	  mpfr_from_real (mpc_imagref(m), im, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m, m, crnd);
	  result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
	  mpc_clear (m);
	}
    }

  return result;
}
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* With DO_NONFINITE the finiteness pre-check is skipped, and the
	 nonfinite flag is forwarded to do_mpc_ckconv below.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref(m0), re0, rnd);
	  mpfr_from_real (mpc_imagref(m0), im0, rnd);
	  mpfr_from_real (mpc_realref(m1), re1, rnd);
	  mpfr_from_real (mpc_imagref(m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
13261 /* FIXME tuples.
13262 The functions below provide an alternate interface for folding
13263 builtin function calls presented as GIMPLE_CALL statements rather
13264 than as CALL_EXPRs. The folded result is still expressed as a
13265 tree. There is too much code duplication in the handling of
13266 varargs functions, and a more intrusive re-factoring would permit
13267 better sharing of code between the tree and statement-based
13268 versions of these functions. */
13270 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13271 along with N new arguments specified as the "..." parameters. SKIP
13272 is the number of arguments in STMT to be omitted. This function is used
13273 to do varargs-to-varargs transformations. */
13275 static tree
13276 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13278 int oldnargs = gimple_call_num_args (stmt);
13279 int nargs = oldnargs - skip + n;
13280 tree fntype = TREE_TYPE (fndecl);
13281 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13282 tree *buffer;
13283 int i, j;
13284 va_list ap;
13285 location_t loc = gimple_location (stmt);
13287 buffer = XALLOCAVEC (tree, nargs);
13288 va_start (ap, n);
13289 for (i = 0; i < n; i++)
13290 buffer[i] = va_arg (ap, tree);
13291 va_end (ap);
13292 for (j = skip; j < oldnargs; j++, i++)
13293 buffer[i] = gimple_call_arg (stmt, j);
13295 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13298 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13299 a normal call should be emitted rather than expanding the function
13300 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13302 static tree
13303 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13305 tree dest, size, len, fn, fmt, flag;
13306 const char *fmt_str;
13307 int nargs = gimple_call_num_args (stmt);
13309 /* Verify the required arguments in the original call. */
13310 if (nargs < 4)
13311 return NULL_TREE;
13312 dest = gimple_call_arg (stmt, 0);
13313 if (!validate_arg (dest, POINTER_TYPE))
13314 return NULL_TREE;
13315 flag = gimple_call_arg (stmt, 1);
13316 if (!validate_arg (flag, INTEGER_TYPE))
13317 return NULL_TREE;
13318 size = gimple_call_arg (stmt, 2);
13319 if (!validate_arg (size, INTEGER_TYPE))
13320 return NULL_TREE;
13321 fmt = gimple_call_arg (stmt, 3);
13322 if (!validate_arg (fmt, POINTER_TYPE))
13323 return NULL_TREE;
13325 if (! host_integerp (size, 1))
13326 return NULL_TREE;
13328 len = NULL_TREE;
13330 if (!init_target_chars ())
13331 return NULL_TREE;
13333 /* Check whether the format is a literal string constant. */
13334 fmt_str = c_getstr (fmt);
13335 if (fmt_str != NULL)
13337 /* If the format doesn't contain % args or %%, we know the size. */
13338 if (strchr (fmt_str, target_percent) == 0)
13340 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13341 len = build_int_cstu (size_type_node, strlen (fmt_str));
13343 /* If the format is "%s" and first ... argument is a string literal,
13344 we know the size too. */
13345 else if (fcode == BUILT_IN_SPRINTF_CHK
13346 && strcmp (fmt_str, target_percent_s) == 0)
13348 tree arg;
13350 if (nargs == 5)
13352 arg = gimple_call_arg (stmt, 4);
13353 if (validate_arg (arg, POINTER_TYPE))
13355 len = c_strlen (arg, 1);
13356 if (! len || ! host_integerp (len, 1))
13357 len = NULL_TREE;
13363 if (! integer_all_onesp (size))
13365 if (! len || ! tree_int_cst_lt (len, size))
13366 return NULL_TREE;
13369 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13370 or if format doesn't contain % chars or is "%s". */
13371 if (! integer_zerop (flag))
13373 if (fmt_str == NULL)
13374 return NULL_TREE;
13375 if (strchr (fmt_str, target_percent) != NULL
13376 && strcmp (fmt_str, target_percent_s))
13377 return NULL_TREE;
13380 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13381 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13382 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13383 if (!fn)
13384 return NULL_TREE;
13386 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13389 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13390 a normal call should be emitted rather than expanding the function
13391 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13392 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13393 passed as second argument. */
13395 tree
13396 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13397 enum built_in_function fcode)
13399 tree dest, size, len, fn, fmt, flag;
13400 const char *fmt_str;
13402 /* Verify the required arguments in the original call. */
13403 if (gimple_call_num_args (stmt) < 5)
13404 return NULL_TREE;
13405 dest = gimple_call_arg (stmt, 0);
13406 if (!validate_arg (dest, POINTER_TYPE))
13407 return NULL_TREE;
13408 len = gimple_call_arg (stmt, 1);
13409 if (!validate_arg (len, INTEGER_TYPE))
13410 return NULL_TREE;
13411 flag = gimple_call_arg (stmt, 2);
13412 if (!validate_arg (flag, INTEGER_TYPE))
13413 return NULL_TREE;
13414 size = gimple_call_arg (stmt, 3);
13415 if (!validate_arg (size, INTEGER_TYPE))
13416 return NULL_TREE;
13417 fmt = gimple_call_arg (stmt, 4);
13418 if (!validate_arg (fmt, POINTER_TYPE))
13419 return NULL_TREE;
13421 if (! host_integerp (size, 1))
13422 return NULL_TREE;
13424 if (! integer_all_onesp (size))
13426 if (! host_integerp (len, 1))
13428 /* If LEN is not constant, try MAXLEN too.
13429 For MAXLEN only allow optimizing into non-_ocs function
13430 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13431 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13432 return NULL_TREE;
13434 else
13435 maxlen = len;
13437 if (tree_int_cst_lt (size, maxlen))
13438 return NULL_TREE;
13441 if (!init_target_chars ())
13442 return NULL_TREE;
13444 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13445 or if format doesn't contain % chars or is "%s". */
13446 if (! integer_zerop (flag))
13448 fmt_str = c_getstr (fmt);
13449 if (fmt_str == NULL)
13450 return NULL_TREE;
13451 if (strchr (fmt_str, target_percent) != NULL
13452 && strcmp (fmt_str, target_percent_s))
13453 return NULL_TREE;
13456 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13457 available. */
13458 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13459 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13460 if (!fn)
13461 return NULL_TREE;
13463 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13466 /* Builtins with folding operations that operate on "..." arguments
13467 need special handling; we need to store the arguments in a convenient
13468 data structure before attempting any folding. Fortunately there are
13469 only a few builtins that fall into this category. FNDECL is the
13470 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13471 result of the function call is ignored. */
13473 static tree
13474 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13475 bool ignore ATTRIBUTE_UNUSED)
13477 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13478 tree ret = NULL_TREE;
13480 switch (fcode)
13482 case BUILT_IN_SPRINTF_CHK:
13483 case BUILT_IN_VSPRINTF_CHK:
13484 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13485 break;
13487 case BUILT_IN_SNPRINTF_CHK:
13488 case BUILT_IN_VSNPRINTF_CHK:
13489 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13491 default:
13492 break;
13494 if (ret)
13496 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13497 TREE_NO_WARNING (ret) = 1;
13498 return ret;
13500 return NULL_TREE;
13503 /* A wrapper function for builtin folding that prevents warnings for
13504 "statement without effect" and the like, caused by removing the
13505 call node earlier than the warning is generated. */
13507 tree
13508 fold_call_stmt (gimple stmt, bool ignore)
13510 tree ret = NULL_TREE;
13511 tree fndecl = gimple_call_fndecl (stmt);
13512 location_t loc = gimple_location (stmt);
13513 if (fndecl
13514 && TREE_CODE (fndecl) == FUNCTION_DECL
13515 && DECL_BUILT_IN (fndecl)
13516 && !gimple_call_va_arg_pack_p (stmt))
13518 int nargs = gimple_call_num_args (stmt);
13520 if (avoid_folding_inline_builtin (fndecl))
13521 return NULL_TREE;
13522 /* FIXME: Don't use a list in this interface. */
13523 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13525 tree arglist = NULL_TREE;
13526 int i;
13527 for (i = nargs - 1; i >= 0; i--)
13528 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13529 return targetm.fold_builtin (fndecl, arglist, ignore);
13531 else
13533 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13535 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13536 int i;
13537 for (i = 0; i < nargs; i++)
13538 args[i] = gimple_call_arg (stmt, i);
13539 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13541 if (!ret)
13542 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13543 if (ret)
13545 /* Propagate location information from original call to
13546 expansion of builtin. Otherwise things like
13547 maybe_emit_chk_warning, that operate on the expansion
13548 of a builtin, will use the wrong location information. */
13549 if (gimple_has_location (stmt))
13551 tree realret = ret;
13552 if (TREE_CODE (ret) == NOP_EXPR)
13553 realret = TREE_OPERAND (ret, 0);
13554 if (CAN_HAVE_LOCATION_P (realret)
13555 && !EXPR_HAS_LOCATION (realret))
13556 SET_EXPR_LOCATION (realret, loc);
13557 return realret;
13559 return ret;
13563 return NULL_TREE;
13566 /* Look up the function in built_in_decls that corresponds to DECL
13567 and set ASMSPEC as its user assembler name. DECL must be a
13568 function decl that declares a builtin. */
13570 void
13571 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13573 tree builtin;
13574 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13575 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13576 && asmspec != 0);
13578 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13579 set_user_assembler_name (builtin, asmspec);
13580 switch (DECL_FUNCTION_CODE (decl))
13582 case BUILT_IN_MEMCPY:
13583 init_block_move_fn (asmspec);
13584 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13585 break;
13586 case BUILT_IN_MEMSET:
13587 init_block_clear_fn (asmspec);
13588 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13589 break;
13590 case BUILT_IN_MEMMOVE:
13591 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13592 break;
13593 case BUILT_IN_MEMCMP:
13594 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13595 break;
13596 case BUILT_IN_ABORT:
13597 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13598 break;
13599 case BUILT_IN_FFS:
13600 if (INT_TYPE_SIZE < BITS_PER_WORD)
13602 set_user_assembler_libfunc ("ffs", asmspec);
13603 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13604 MODE_INT, 0), "ffs");
13606 break;
13607 default:
13608 break;