gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
76 /* Set up an array of _DECL trees, making sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
233 bool
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
237 return true;
238 if (strncmp (name, "__sync_", 7) == 0)
239 return true;
240 return false;
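/* For illustration: is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") both return true, while
   is_builtin_name ("memcpy") returns false, since only the prefixed
   "internal" names are recognized here.  */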
244 /* Return true if DECL is a function symbol representing a built-in. */
246 bool
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
257 static bool
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
267 /* Return the alignment in bits of EXP, an object, and set *BITPOSP
268 to the bit offset of EXP modulo that alignment. */
270 unsigned int
271 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
273 HOST_WIDE_INT bitsize, bitpos;
274 tree offset;
275 enum machine_mode mode;
276 int unsignedp, volatilep;
277 unsigned int align, inner;
279 /* Get the innermost object and the constant (bitpos) and possibly
280 variable (offset) offset of the access. */
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 /* Extract alignment information from the innermost object and
285 possibly adjust bitpos and offset. */
286 if (TREE_CODE (exp) == CONST_DECL)
287 exp = DECL_INITIAL (exp);
288 if (DECL_P (exp)
289 && TREE_CODE (exp) != LABEL_DECL)
290 align = DECL_ALIGN (exp);
291 else if (CONSTANT_CLASS_P (exp))
293 align = TYPE_ALIGN (TREE_TYPE (exp));
294 #ifdef CONSTANT_ALIGNMENT
295 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
296 #endif
298 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
299 align = TYPE_ALIGN (TREE_TYPE (exp));
300 else if (TREE_CODE (exp) == INDIRECT_REF)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == MEM_REF)
304 tree addr = TREE_OPERAND (exp, 0);
305 struct ptr_info_def *pi;
306 if (TREE_CODE (addr) == BIT_AND_EXPR
307 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
309 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
310 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
311 align *= BITS_PER_UNIT;
312 addr = TREE_OPERAND (addr, 0);
314 else
315 align = BITS_PER_UNIT;
316 if (TREE_CODE (addr) == SSA_NAME
317 && (pi = SSA_NAME_PTR_INFO (addr)))
319 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
320 align = MAX (pi->align * BITS_PER_UNIT, align);
322 else if (TREE_CODE (addr) == ADDR_EXPR)
323 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
324 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
326 else if (TREE_CODE (exp) == TARGET_MEM_REF)
328 struct ptr_info_def *pi;
329 tree addr = TMR_BASE (exp);
330 if (TREE_CODE (addr) == BIT_AND_EXPR
331 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
333 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
334 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
335 align *= BITS_PER_UNIT;
336 addr = TREE_OPERAND (addr, 0);
338 else
339 align = BITS_PER_UNIT;
340 if (TREE_CODE (addr) == SSA_NAME
341 && (pi = SSA_NAME_PTR_INFO (addr)))
343 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
344 align = MAX (pi->align * BITS_PER_UNIT, align);
346 else if (TREE_CODE (addr) == ADDR_EXPR)
347 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
348 if (TMR_OFFSET (exp))
349 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
350 if (TMR_INDEX (exp) && TMR_STEP (exp))
352 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
353 align = MIN (align, (step & -step) * BITS_PER_UNIT);
355 else if (TMR_INDEX (exp))
356 align = BITS_PER_UNIT;
357 if (TMR_INDEX2 (exp))
358 align = BITS_PER_UNIT;
360 else
361 align = BITS_PER_UNIT;
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 inner = ~0U;
366 while (offset)
368 tree next_offset;
370 if (TREE_CODE (offset) == PLUS_EXPR)
372 next_offset = TREE_OPERAND (offset, 0);
373 offset = TREE_OPERAND (offset, 1);
375 else
376 next_offset = NULL;
377 if (host_integerp (offset, 1))
379 /* Any overflow in calculating offset_bits won't change
380 the alignment. */
381 unsigned offset_bits
382 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
384 if (offset_bits)
385 inner = MIN (inner, (offset_bits & -offset_bits));
387 else if (TREE_CODE (offset) == MULT_EXPR
388 && host_integerp (TREE_OPERAND (offset, 1), 1))
390 /* Any overflow in calculating offset_factor won't change
391 the alignment. */
392 unsigned offset_factor
393 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
394 * BITS_PER_UNIT);
396 if (offset_factor)
397 inner = MIN (inner, (offset_factor & -offset_factor));
399 else
401 inner = MIN (inner, BITS_PER_UNIT);
402 break;
404 offset = next_offset;
407 /* Alignment is innermost object alignment adjusted by the constant
408 and non-constant offset parts. */
409 align = MIN (align, inner);
410 bitpos = bitpos & (align - 1);
412 *bitposp = bitpos;
413 return align;
416 /* Return the alignment in bits of EXP, an object.
417 Don't return more than MAX_ALIGN no matter what. */
419 unsigned int
420 get_object_alignment (tree exp, unsigned int max_align)
422 unsigned HOST_WIDE_INT bitpos = 0;
423 unsigned int align;
425 align = get_object_alignment_1 (exp, &bitpos);
427 /* align and bitpos now specify known low bits of the pointer.
428 ptr & (align - 1) == bitpos. */
430 if (bitpos != 0)
431 align = (bitpos & -bitpos);
433 return MIN (align, max_align);
436 /* Returns true iff we can trust that alignment information has been
437 calculated properly. */
439 bool
440 can_trust_pointer_alignment (void)
442 /* We rely on TER to compute accurate alignment information. */
443 return (optimize && flag_tree_ter);
446 /* Return the alignment in bits of EXP, a pointer valued expression.
447 But don't return more than MAX_ALIGN no matter what.
448 The alignment returned is, by default, the alignment of the thing that
449 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
451 Otherwise, look at the expression to see if we can do better, i.e., if the
452 expression is actually pointing at an object whose alignment is tighter. */
454 unsigned int
455 get_pointer_alignment (tree exp, unsigned int max_align)
457 STRIP_NOPS (exp);
459 if (TREE_CODE (exp) == ADDR_EXPR)
460 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
461 else if (TREE_CODE (exp) == SSA_NAME
462 && POINTER_TYPE_P (TREE_TYPE (exp)))
464 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
465 unsigned align;
466 if (!pi)
467 return BITS_PER_UNIT;
468 if (pi->misalign != 0)
469 align = (pi->misalign & -pi->misalign);
470 else
471 align = pi->align;
472 return MIN (max_align, align * BITS_PER_UNIT);
475 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
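/* A small illustration (target-dependent): for an ADDR_EXPR such as &d,
   where d is a double aligned to 8 bytes on the assumed target, the
   result is the object alignment of d, here 64 bits, capped at
   MAX_ALIGN; for an SSA_NAME pointer with no points-to information the
   fallback is BITS_PER_UNIT.  */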
478 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
479 way, because it could contain a zero byte in the middle.
480 TREE_STRING_LENGTH is the size of the character array, not the string.
482 ONLY_VALUE should be nonzero if the result is not going to be emitted
483 into the instruction stream and zero if it is going to be expanded.
484 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
485 is returned, otherwise NULL, since
486 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
487 evaluate the side-effects.
489 The value returned is of type `ssizetype'.
491 Unfortunately, string_constant can't access the values of const char
492 arrays with initializers, so neither can we do so here. */
494 tree
495 c_strlen (tree src, int only_value)
497 tree offset_node;
498 HOST_WIDE_INT offset;
499 int max;
500 const char *ptr;
501 location_t loc;
503 STRIP_NOPS (src);
504 if (TREE_CODE (src) == COND_EXPR
505 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
507 tree len1, len2;
509 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
510 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
511 if (tree_int_cst_equal (len1, len2))
512 return len1;
515 if (TREE_CODE (src) == COMPOUND_EXPR
516 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
517 return c_strlen (TREE_OPERAND (src, 1), only_value);
519 loc = EXPR_LOC_OR_HERE (src);
521 src = string_constant (src, &offset_node);
522 if (src == 0)
523 return NULL_TREE;
525 max = TREE_STRING_LENGTH (src) - 1;
526 ptr = TREE_STRING_POINTER (src);
528 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
530 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
531 compute the offset to the following null if we don't know where to
532 start searching for it. */
533 int i;
535 for (i = 0; i < max; i++)
536 if (ptr[i] == 0)
537 return NULL_TREE;
539 /* We don't know the starting offset, but we do know that the string
540 has no internal zero bytes. We can assume that the offset falls
541 within the bounds of the string; otherwise, the programmer deserves
542 what he gets. Subtract the offset from the length of the string,
543 and return that. This would perhaps not be valid if we were dealing
544 with named arrays in addition to literal string constants. */
546 return size_diffop_loc (loc, size_int (max), offset_node);
549 /* We have a known offset into the string. Start searching there for
550 a null character if we can represent it as a single HOST_WIDE_INT. */
551 if (offset_node == 0)
552 offset = 0;
553 else if (! host_integerp (offset_node, 0))
554 offset = -1;
555 else
556 offset = tree_low_cst (offset_node, 0);
558 /* If the offset is known to be out of bounds, warn, and call strlen at
559 runtime. */
560 if (offset < 0 || offset > max)
562 /* Suppress multiple warnings for propagated constant strings. */
563 if (! TREE_NO_WARNING (src))
565 warning_at (loc, 0, "offset outside bounds of constant string");
566 TREE_NO_WARNING (src) = 1;
568 return NULL_TREE;
571 /* Use strlen to search for the first zero byte. Since any strings
572 constructed with build_string will have nulls appended, we win even
573 if we get handed something like (char[4])"abcd".
575 Since OFFSET is our starting index into the string, no further
576 calculation is needed. */
577 return ssize_int (strlen (ptr + offset));
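/* Informal examples of the folding above:
     c_strlen of "hello" at constant offset 0    -> ssize_int (5)
     c_strlen of "foo\0bar" at constant offset 4 -> ssize_int (3)
     c_strlen of "foo\0bar" at an unknown offset -> NULL_TREE, because the
       internal NUL makes the result depend on the starting point.  */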
580 /* Return a char pointer for a C string if it is a string constant
581 or sum of string constant and integer constant. */
583 static const char *
584 c_getstr (tree src)
586 tree offset_node;
588 src = string_constant (src, &offset_node);
589 if (src == 0)
590 return 0;
592 if (offset_node == 0)
593 return TREE_STRING_POINTER (src);
594 else if (!host_integerp (offset_node, 1)
595 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
596 return 0;
598 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
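/* For instance, c_getstr applied to the address "hello" + 1 yields the
   host pointer to "ello", while an offset beyond the string constant,
   or a non-constant offset, yields 0.  */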
601 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
602 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
604 static rtx
605 c_readstr (const char *str, enum machine_mode mode)
607 HOST_WIDE_INT c[2];
608 HOST_WIDE_INT ch;
609 unsigned int i, j;
611 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
613 c[0] = 0;
614 c[1] = 0;
615 ch = 1;
616 for (i = 0; i < GET_MODE_SIZE (mode); i++)
618 j = i;
619 if (WORDS_BIG_ENDIAN)
620 j = GET_MODE_SIZE (mode) - i - 1;
621 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
622 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
623 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
624 j *= BITS_PER_UNIT;
625 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
627 if (ch)
628 ch = (unsigned char) str[i];
629 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
631 return immed_double_const (c[0], c[1], mode);
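/* Example (assuming a target whose byte and word endianness agree, with
   8-bit units): reading "abcd" in SImode on a little-endian target
   packs 'a' into the low byte first, giving 0x64636261; the same call
   on a big-endian target gives 0x61626364.  */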
634 /* Cast a target constant CST to target CHAR and if that value fits into
635 host char type, return zero and put that value into variable pointed to by
636 P. */
638 static int
639 target_char_cast (tree cst, char *p)
641 unsigned HOST_WIDE_INT val, hostval;
643 if (TREE_CODE (cst) != INTEGER_CST
644 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
645 return 1;
647 val = TREE_INT_CST_LOW (cst);
648 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
649 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
651 hostval = val;
652 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
653 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
655 if (val != hostval)
656 return 1;
658 *p = hostval;
659 return 0;
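/* E.g. for the INTEGER_CST 65 ('A') this stores 65 through P and
   returns 0; for a non-INTEGER_CST argument, or a value that does not
   survive the narrowing to the host char width, it returns 1.  */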
662 /* Similar to save_expr, but assumes that arbitrary code is not executed
663 in between the multiple evaluations. In particular, we assume that a
664 non-addressable local variable will not be modified. */
666 static tree
667 builtin_save_expr (tree exp)
669 if (TREE_CODE (exp) == SSA_NAME
670 || (TREE_ADDRESSABLE (exp) == 0
671 && (TREE_CODE (exp) == PARM_DECL
672 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
673 return exp;
675 return save_expr (exp);
678 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
679 times to get the address of either a higher stack frame, or a return
680 address located within it (depending on FNDECL_CODE). */
682 static rtx
683 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
685 int i;
687 #ifdef INITIAL_FRAME_ADDRESS_RTX
688 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
689 #else
690 rtx tem;
692 /* For a zero count with __builtin_return_address, we don't care what
693 frame address we return, because target-specific definitions will
694 override us. Therefore frame pointer elimination is OK, and using
695 the soft frame pointer is OK.
697 For a nonzero count, or a zero count with __builtin_frame_address,
698 we require a stable offset from the current frame pointer to the
699 previous one, so we must use the hard frame pointer, and
700 we must disable frame pointer elimination. */
701 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
702 tem = frame_pointer_rtx;
703 else
705 tem = hard_frame_pointer_rtx;
707 /* Tell reload not to eliminate the frame pointer. */
708 crtl->accesses_prior_frames = 1;
710 #endif
712 /* Some machines need special handling before we can access
713 arbitrary frames. For example, on the SPARC, we must first flush
714 all register windows to the stack. */
715 #ifdef SETUP_FRAME_ADDRESSES
716 if (count > 0)
717 SETUP_FRAME_ADDRESSES ();
718 #endif
720 /* On the SPARC, the return address is not in the frame, it is in a
721 register. There is no way to access it off of the current frame
722 pointer, but it can be accessed off the previous frame pointer by
723 reading the value from the register window save area. */
724 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
725 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
726 count--;
727 #endif
729 /* Scan back COUNT frames to the specified frame. */
730 for (i = 0; i < count; i++)
732 /* Assume the dynamic chain pointer is in the word that the
733 frame address points to, unless otherwise specified. */
734 #ifdef DYNAMIC_CHAIN_ADDRESS
735 tem = DYNAMIC_CHAIN_ADDRESS (tem);
736 #endif
737 tem = memory_address (Pmode, tem);
738 tem = gen_frame_mem (Pmode, tem);
739 tem = copy_to_reg (tem);
742 /* For __builtin_frame_address, return what we've got. But, on
743 the SPARC for example, we may have to add a bias. */
744 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
745 #ifdef FRAME_ADDR_RTX
746 return FRAME_ADDR_RTX (tem);
747 #else
748 return tem;
749 #endif
751 /* For __builtin_return_address, get the return address from that frame. */
752 #ifdef RETURN_ADDR_RTX
753 tem = RETURN_ADDR_RTX (count, tem);
754 #else
755 tem = memory_address (Pmode,
756 plus_constant (tem, GET_MODE_SIZE (Pmode)));
757 tem = gen_frame_mem (Pmode, tem);
758 #endif
759 return tem;
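/* Typical source-level uses that reach this expander:

     void *ra = __builtin_return_address (0);   (count == 0)
     void *fp = __builtin_frame_address (1);    (count == 1)

   Nonzero counts walk the dynamic chain COUNT times as above.  */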
762 /* Alias set used for setjmp buffer. */
763 static alias_set_type setjmp_alias_set = -1;
765 /* Construct the leading half of a __builtin_setjmp call. Control will
766 return to RECEIVER_LABEL. This is also called directly by the SJLJ
767 exception handling code. */
769 void
770 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
772 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
773 rtx stack_save;
774 rtx mem;
776 if (setjmp_alias_set == -1)
777 setjmp_alias_set = new_alias_set ();
779 buf_addr = convert_memory_address (Pmode, buf_addr);
781 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
783 /* We store the frame pointer and the address of receiver_label in
784 the buffer and use the rest of it for the stack save area, which
785 is machine-dependent. */
787 mem = gen_rtx_MEM (Pmode, buf_addr);
788 set_mem_alias_set (mem, setjmp_alias_set);
789 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
791 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
792 set_mem_alias_set (mem, setjmp_alias_set);
794 emit_move_insn (validize_mem (mem),
795 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
797 stack_save = gen_rtx_MEM (sa_mode,
798 plus_constant (buf_addr,
799 2 * GET_MODE_SIZE (Pmode)));
800 set_mem_alias_set (stack_save, setjmp_alias_set);
801 emit_stack_save (SAVE_NONLOCAL, &stack_save);
803 /* If there is further processing to do, do it. */
804 #ifdef HAVE_builtin_setjmp_setup
805 if (HAVE_builtin_setjmp_setup)
806 emit_insn (gen_builtin_setjmp_setup (buf_addr));
807 #endif
809 /* We have a nonlocal label. */
810 cfun->has_nonlocal_label = 1;
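/* Rough layout of the setjmp buffer as written above (in Pmode words):
     word 0          frame value from targetm.builtin_setjmp_frame_value
     word 1          address of RECEIVER_LABEL
     word 2 onward   stack save area (sa_mode, machine-dependent)  */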
813 /* Construct the trailing part of a __builtin_setjmp call. This is
814 also called directly by the SJLJ exception handling code. */
816 void
817 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
819 rtx chain;
821 /* Clobber the FP when we get here, so we have to make sure it's
822 marked as used by this function. */
823 emit_use (hard_frame_pointer_rtx);
825 /* Mark the static chain as clobbered here so life information
826 doesn't get messed up for it. */
827 chain = targetm.calls.static_chain (current_function_decl, true);
828 if (chain && REG_P (chain))
829 emit_clobber (chain);
831 /* Now put in the code to restore the frame pointer, and argument
832 pointer, if needed. */
833 #ifdef HAVE_nonlocal_goto
834 if (! HAVE_nonlocal_goto)
835 #endif
837 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
838 /* This might change the hard frame pointer in ways that aren't
839 apparent to early optimization passes, so force a clobber. */
840 emit_clobber (hard_frame_pointer_rtx);
843 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
844 if (fixed_regs[ARG_POINTER_REGNUM])
846 #ifdef ELIMINABLE_REGS
847 size_t i;
848 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
850 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
851 if (elim_regs[i].from == ARG_POINTER_REGNUM
852 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
853 break;
855 if (i == ARRAY_SIZE (elim_regs))
856 #endif
858 /* Now restore our arg pointer from the address at which it
859 was saved in our stack frame. */
860 emit_move_insn (crtl->args.internal_arg_pointer,
861 copy_to_reg (get_arg_pointer_save_area ()));
864 #endif
866 #ifdef HAVE_builtin_setjmp_receiver
867 if (HAVE_builtin_setjmp_receiver)
868 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
869 else
870 #endif
871 #ifdef HAVE_nonlocal_goto_receiver
872 if (HAVE_nonlocal_goto_receiver)
873 emit_insn (gen_nonlocal_goto_receiver ());
874 else
875 #endif
876 { /* Nothing */ }
878 /* We must not allow the code we just generated to be reordered by
879 scheduling. Specifically, the update of the frame pointer must
880 happen immediately, not later. */
881 emit_insn (gen_blockage ());
884 /* __builtin_longjmp is passed a pointer to an array of five words (not
885 all will be used on all machines). It operates similarly to the C
886 library function of the same name, but is more efficient. Much of
887 the code below is copied from the handling of non-local gotos. */
889 static void
890 expand_builtin_longjmp (rtx buf_addr, rtx value)
892 rtx fp, lab, stack, insn, last;
893 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
895 /* DRAP is needed for stack realignment if longjmp is expanded to the
896 current function. */
897 if (SUPPORTS_STACK_ALIGNMENT)
898 crtl->need_drap = true;
900 if (setjmp_alias_set == -1)
901 setjmp_alias_set = new_alias_set ();
903 buf_addr = convert_memory_address (Pmode, buf_addr);
905 buf_addr = force_reg (Pmode, buf_addr);
907 /* We require that the user must pass a second argument of 1, because
908 that is what builtin_setjmp will return. */
909 gcc_assert (value == const1_rtx);
911 last = get_last_insn ();
912 #ifdef HAVE_builtin_longjmp
913 if (HAVE_builtin_longjmp)
914 emit_insn (gen_builtin_longjmp (buf_addr));
915 else
916 #endif
918 fp = gen_rtx_MEM (Pmode, buf_addr);
919 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
920 GET_MODE_SIZE (Pmode)));
922 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
923 2 * GET_MODE_SIZE (Pmode)));
924 set_mem_alias_set (fp, setjmp_alias_set);
925 set_mem_alias_set (lab, setjmp_alias_set);
926 set_mem_alias_set (stack, setjmp_alias_set);
928 /* Pick up FP, label, and SP from the block and jump. This code is
929 from expand_goto in stmt.c; see there for detailed comments. */
930 #ifdef HAVE_nonlocal_goto
931 if (HAVE_nonlocal_goto)
932 /* We have to pass a value to the nonlocal_goto pattern that will
933 get copied into the static_chain pointer, but it does not matter
934 what that value is, because builtin_setjmp does not use it. */
935 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
936 else
937 #endif
939 lab = copy_to_reg (lab);
941 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
942 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
944 emit_move_insn (hard_frame_pointer_rtx, fp);
945 emit_stack_restore (SAVE_NONLOCAL, stack);
947 emit_use (hard_frame_pointer_rtx);
948 emit_use (stack_pointer_rtx);
949 emit_indirect_jump (lab);
953 /* Search backwards and mark the jump insn as a non-local goto.
954 Note that this precludes the use of __builtin_longjmp to a
955 __builtin_setjmp target in the same function. However, we've
956 already cautioned the user that these functions are for
957 internal exception handling use only. */
958 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
960 gcc_assert (insn != last);
962 if (JUMP_P (insn))
964 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
965 break;
967 else if (CALL_P (insn))
968 break;
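/* Sketch of the only supported source-level pairing (the second
   argument to __builtin_longjmp must be the constant 1, as asserted
   above):

     static void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       do_work ();          direct return path
     else
       handle_unwind ();    reached via __builtin_longjmp (buf, 1)

   do_work and handle_unwind are placeholders for user code.  */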
972 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
973 and the address of the save area. */
975 static rtx
976 expand_builtin_nonlocal_goto (tree exp)
978 tree t_label, t_save_area;
979 rtx r_label, r_save_area, r_fp, r_sp, insn;
981 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
982 return NULL_RTX;
984 t_label = CALL_EXPR_ARG (exp, 0);
985 t_save_area = CALL_EXPR_ARG (exp, 1);
987 r_label = expand_normal (t_label);
988 r_label = convert_memory_address (Pmode, r_label);
989 r_save_area = expand_normal (t_save_area);
990 r_save_area = convert_memory_address (Pmode, r_save_area);
991 /* Copy the address of the save location to a register just in case it was
992 based on the frame pointer. */
993 r_save_area = copy_to_reg (r_save_area);
994 r_fp = gen_rtx_MEM (Pmode, r_save_area);
995 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
996 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
998 crtl->has_nonlocal_goto = 1;
1000 #ifdef HAVE_nonlocal_goto
1001 /* ??? We no longer need to pass the static chain value, afaik. */
1002 if (HAVE_nonlocal_goto)
1003 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1004 else
1005 #endif
1007 r_label = copy_to_reg (r_label);
1009 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1010 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1012 /* Restore frame pointer for containing function. */
1013 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1014 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1016 /* USE of hard_frame_pointer_rtx added for consistency;
1017 not clear if really needed. */
1018 emit_use (hard_frame_pointer_rtx);
1019 emit_use (stack_pointer_rtx);
1021 /* If the architecture is using a GP register, we must
1022 conservatively assume that the target function makes use of it.
1023 The prologue of functions with nonlocal gotos must therefore
1024 initialize the GP register to the appropriate value, and we
1025 must then make sure that this value is live at the point
1026 of the jump. (Note that this doesn't necessarily apply
1027 to targets with a nonlocal_goto pattern; they are free
1028 to implement it in their own way. Note also that this is
1029 a no-op if the GP register is a global invariant.) */
1030 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1031 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1032 emit_use (pic_offset_table_rtx);
1034 emit_indirect_jump (r_label);
1037 /* Search backwards to the jump insn and mark it as a
1038 non-local goto. */
1039 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1041 if (JUMP_P (insn))
1043 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1044 break;
1046 else if (CALL_P (insn))
1047 break;
1050 return const0_rtx;
1053 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1054 (not all will be used on all machines) that was passed to __builtin_setjmp.
1055 It updates the stack pointer in that block to correspond to the current
1056 stack pointer. */
1058 static void
1059 expand_builtin_update_setjmp_buf (rtx buf_addr)
1061 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1062 rtx stack_save
1063 = gen_rtx_MEM (sa_mode,
1064 memory_address
1065 (sa_mode,
1066 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1068 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1071 /* Expand a call to __builtin_prefetch. For a target that does not support
1072 data prefetch, evaluate the memory address argument in case it has side
1073 effects. */
1075 static void
1076 expand_builtin_prefetch (tree exp)
1078 tree arg0, arg1, arg2;
1079 int nargs;
1080 rtx op0, op1, op2;
1082 if (!validate_arglist (exp, POINTER_TYPE, 0))
1083 return;
1085 arg0 = CALL_EXPR_ARG (exp, 0);
1087 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1088 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1089 locality). */
1090 nargs = call_expr_nargs (exp);
1091 if (nargs > 1)
1092 arg1 = CALL_EXPR_ARG (exp, 1);
1093 else
1094 arg1 = integer_zero_node;
1095 if (nargs > 2)
1096 arg2 = CALL_EXPR_ARG (exp, 2);
1097 else
1098 arg2 = integer_three_node;
1100 /* Argument 0 is an address. */
1101 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1103 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1104 if (TREE_CODE (arg1) != INTEGER_CST)
1106 error ("second argument to %<__builtin_prefetch%> must be a constant");
1107 arg1 = integer_zero_node;
1109 op1 = expand_normal (arg1);
1110 /* Argument 1 must be either zero or one. */
1111 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1113 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1114 " using zero");
1115 op1 = const0_rtx;
1118 /* Argument 2 (locality) must be a compile-time constant int. */
1119 if (TREE_CODE (arg2) != INTEGER_CST)
1121 error ("third argument to %<__builtin_prefetch%> must be a constant");
1122 arg2 = integer_zero_node;
1124 op2 = expand_normal (arg2);
1125 /* Argument 2 must be 0, 1, 2, or 3. */
1126 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1128 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1129 op2 = const0_rtx;
1132 #ifdef HAVE_prefetch
1133 if (HAVE_prefetch)
1135 struct expand_operand ops[3];
1137 create_address_operand (&ops[0], op0);
1138 create_integer_operand (&ops[1], INTVAL (op1));
1139 create_integer_operand (&ops[2], INTVAL (op2));
1140 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1141 return;
1143 #endif
1145 /* Don't do anything with direct references to volatile memory, but
1146 generate code to handle other side effects. */
1147 if (!MEM_P (op0) && side_effects_p (op0))
1148 emit_insn (op0);
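/* For example, __builtin_prefetch (p, 0, 3) prefetches *p for reading
   with maximal temporal locality; __builtin_prefetch (p) is equivalent,
   since the read/write argument defaults to 0 and locality to 3.  */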
1151 /* Get a MEM rtx for expression EXP which is the address of an operand
1152 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1153 the maximum length of the block of memory that might be accessed or
1154 NULL if unknown. */
1156 static rtx
1157 get_memory_rtx (tree exp, tree len)
1159 tree orig_exp = exp;
1160 rtx addr, mem;
1161 HOST_WIDE_INT off;
1163 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1164 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1165 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1166 exp = TREE_OPERAND (exp, 0);
1168 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1169 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1171 /* Get an expression we can use to find the attributes to assign to MEM.
1172 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1173 we can. First remove any nops. */
1174 while (CONVERT_EXPR_P (exp)
1175 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1176 exp = TREE_OPERAND (exp, 0);
1178 off = 0;
1179 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1180 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1181 && host_integerp (TREE_OPERAND (exp, 1), 0)
1182 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1183 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1184 else if (TREE_CODE (exp) == ADDR_EXPR)
1185 exp = TREE_OPERAND (exp, 0);
1186 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1187 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1188 else
1189 exp = NULL;
1191 /* Honor attributes derived from exp, except for the alias set
1192 (as builtin stringops may alias with anything) and the size
1193 (as stringops may access multiple array elements). */
1194 if (exp)
1196 set_mem_attributes (mem, exp, 0);
1198 if (off)
1199 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1201 /* Allow the string and memory builtins to overflow from one
1202 field into another, see http://gcc.gnu.org/PR23561.
1203 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1204 memory accessed by the string or memory builtin will fit
1205 within the field. */
1206 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1208 tree mem_expr = MEM_EXPR (mem);
1209 HOST_WIDE_INT offset = -1, length = -1;
1210 tree inner = exp;
1212 while (TREE_CODE (inner) == ARRAY_REF
1213 || CONVERT_EXPR_P (inner)
1214 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1215 || TREE_CODE (inner) == SAVE_EXPR)
1216 inner = TREE_OPERAND (inner, 0);
1218 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1220 if (MEM_OFFSET (mem)
1221 && CONST_INT_P (MEM_OFFSET (mem)))
1222 offset = INTVAL (MEM_OFFSET (mem));
1224 if (offset >= 0 && len && host_integerp (len, 0))
1225 length = tree_low_cst (len, 0);
1227 while (TREE_CODE (inner) == COMPONENT_REF)
1229 tree field = TREE_OPERAND (inner, 1);
1230 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1231 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1233 /* Bitfields are generally not byte-addressable. */
1234 gcc_assert (!DECL_BIT_FIELD (field)
1235 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1236 % BITS_PER_UNIT) == 0
1237 && host_integerp (DECL_SIZE (field), 0)
1238 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1239 % BITS_PER_UNIT) == 0));
1241 /* If we can prove that the memory starting at XEXP (mem, 0) and
1242 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1243 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1244 fields without DECL_SIZE_UNIT like flexible array members. */
1245 if (length >= 0
1246 && DECL_SIZE_UNIT (field)
1247 && host_integerp (DECL_SIZE_UNIT (field), 0))
1249 HOST_WIDE_INT size
1250 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1251 if (offset <= size
1252 && length <= size
1253 && offset + length <= size)
1254 break;
1257 if (offset >= 0
1258 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1259 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1260 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1261 / BITS_PER_UNIT;
1262 else
1264 offset = -1;
1265 length = -1;
1268 mem_expr = TREE_OPERAND (mem_expr, 0);
1269 inner = TREE_OPERAND (inner, 0);
1272 if (mem_expr == NULL)
1273 offset = -1;
1274 if (mem_expr != MEM_EXPR (mem))
1276 set_mem_expr (mem, mem_expr);
1277 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1280 set_mem_alias_set (mem, 0);
1281 set_mem_size (mem, NULL_RTX);
1284 return mem;
1287 /* Built-in functions to perform an untyped call and return. */
1289 #define apply_args_mode \
1290 (this_target_builtins->x_apply_args_mode)
1291 #define apply_result_mode \
1292 (this_target_builtins->x_apply_result_mode)
1294 /* Return the size required for the block returned by __builtin_apply_args,
1295 and initialize apply_args_mode. */
1297 static int
1298 apply_args_size (void)
1300 static int size = -1;
1301 int align;
1302 unsigned int regno;
1303 enum machine_mode mode;
1305 /* The values computed by this function never change. */
1306 if (size < 0)
1308 /* The first value is the incoming arg-pointer. */
1309 size = GET_MODE_SIZE (Pmode);
1311 /* The second value is the structure value address unless this is
1312 passed as an "invisible" first argument. */
1313 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1314 size += GET_MODE_SIZE (Pmode);
1316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1317 if (FUNCTION_ARG_REGNO_P (regno))
1319 mode = targetm.calls.get_raw_arg_mode (regno);
1321 gcc_assert (mode != VOIDmode);
1323 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1324 if (size % align != 0)
1325 size = CEIL (size, align) * align;
1326 size += GET_MODE_SIZE (mode);
1327 apply_args_mode[regno] = mode;
1329 else
1331 apply_args_mode[regno] = VOIDmode;
1334 return size;
1337 /* Return the size required for the block returned by __builtin_apply,
1338 and initialize apply_result_mode. */
1340 static int
1341 apply_result_size (void)
1343 static int size = -1;
1344 int align, regno;
1345 enum machine_mode mode;
1347 /* The values computed by this function never change. */
1348 if (size < 0)
1350 size = 0;
1352 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1353 if (targetm.calls.function_value_regno_p (regno))
1355 mode = targetm.calls.get_raw_result_mode (regno);
1357 gcc_assert (mode != VOIDmode);
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
1362 size += GET_MODE_SIZE (mode);
1363 apply_result_mode[regno] = mode;
1365 else
1366 apply_result_mode[regno] = VOIDmode;
1368 /* Allow targets that use untyped_call and untyped_return to override
1369 the size so that machine-specific information can be stored here. */
1370 #ifdef APPLY_RESULT_SIZE
1371 size = APPLY_RESULT_SIZE;
1372 #endif
1374 return size;
1377 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1378 /* Create a vector describing the result block RESULT. If SAVEP is true,
1379 the result block is used to save the values; otherwise it is used to
1380 restore the values. */
1382 static rtx
1383 result_vector (int savep, rtx result)
1385 int regno, size, align, nelts;
1386 enum machine_mode mode;
1387 rtx reg, mem;
1388 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1390 size = nelts = 0;
1391 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1392 if ((mode = apply_result_mode[regno]) != VOIDmode)
1394 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1395 if (size % align != 0)
1396 size = CEIL (size, align) * align;
1397 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1398 mem = adjust_address (result, mode, size);
1399 savevec[nelts++] = (savep
1400 ? gen_rtx_SET (VOIDmode, mem, reg)
1401 : gen_rtx_SET (VOIDmode, reg, mem));
1402 size += GET_MODE_SIZE (mode);
1404 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1406 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1408 /* Save the state required to perform an untyped call with the same
1409 arguments as were passed to the current function. */
1411 static rtx
1412 expand_builtin_apply_args_1 (void)
1414 rtx registers, tem;
1415 int size, align, regno;
1416 enum machine_mode mode;
1417 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1419 /* Create a block where the arg-pointer, structure value address,
1420 and argument registers can be saved. */
1421 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1423 /* Walk past the arg-pointer and structure value address. */
1424 size = GET_MODE_SIZE (Pmode);
1425 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1426 size += GET_MODE_SIZE (Pmode);
1428 /* Save each register used in calling a function to the block. */
1429 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1430 if ((mode = apply_args_mode[regno]) != VOIDmode)
1432 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1433 if (size % align != 0)
1434 size = CEIL (size, align) * align;
1436 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1438 emit_move_insn (adjust_address (registers, mode, size), tem);
1439 size += GET_MODE_SIZE (mode);
1442 /* Save the arg pointer to the block. */
1443 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1444 #ifdef STACK_GROWS_DOWNWARD
1445 /* We need the pointer as the caller actually passed it to us, not
1446 as we might have pretended it was passed. Make sure it's a valid
1447 operand, as emit_move_insn isn't expected to handle a PLUS. */
1448 tem
1449 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1450 NULL_RTX);
1451 #endif
1452 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1454 size = GET_MODE_SIZE (Pmode);
1456 /* Save the structure value address unless this is passed as an
1457 "invisible" first argument. */
1458 if (struct_incoming_value)
1460 emit_move_insn (adjust_address (registers, Pmode, size),
1461 copy_to_reg (struct_incoming_value));
1462 size += GET_MODE_SIZE (Pmode);
1465 /* Return the address of the block. */
1466 return copy_addr_to_reg (XEXP (registers, 0));
1469 /* __builtin_apply_args returns block of memory allocated on
1470 the stack into which is stored the arg pointer, structure
1471 value address, static chain, and all the registers that might
1472 possibly be used in performing a function call. The code is
1473 moved to the start of the function so the incoming values are
1474 saved. */
1476 static rtx
1477 expand_builtin_apply_args (void)
1479 /* Don't do __builtin_apply_args more than once in a function.
1480 Save the result of the first call and reuse it. */
1481 if (apply_args_value != 0)
1482 return apply_args_value;
1484 /* When this function is called, it means that registers must be
1485 saved on entry to this function. So we migrate the
1486 call to the first insn of this function. */
1487 rtx temp;
1488 rtx seq;
1490 start_sequence ();
1491 temp = expand_builtin_apply_args_1 ();
1492 seq = get_insns ();
1493 end_sequence ();
1495 apply_args_value = temp;
1497 /* Put the insns after the NOTE that starts the function.
1498 If this is inside a start_sequence, make the outer-level insn
1499 chain current, so the code is placed at the start of the
1500 function. If internal_arg_pointer is a non-virtual pseudo,
1501 it needs to be placed after the function that initializes
1502 that pseudo. */
1503 push_topmost_sequence ();
1504 if (REG_P (crtl->args.internal_arg_pointer)
1505 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1506 emit_insn_before (seq, parm_birth_insn);
1507 else
1508 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1509 pop_topmost_sequence ();
1510 return temp;
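/* The three untyped-call builtins are meant to be used together from
   user code, roughly like this (the 64 is a caller-chosen upper bound
   on the size of the pushed arguments):

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*)()) wrapped_fn, args, 64);
     __builtin_return (ret);

   wrapped_fn stands for whatever function is being forwarded to.  */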
1514 /* Perform an untyped call and save the state required to perform an
1515 untyped return of whatever value was returned by the given function. */
1517 static rtx
1518 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1520 int size, align, regno;
1521 enum machine_mode mode;
1522 rtx incoming_args, result, reg, dest, src, call_insn;
1523 rtx old_stack_level = 0;
1524 rtx call_fusage = 0;
1525 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1527 arguments = convert_memory_address (Pmode, arguments);
1529 /* Create a block where the return registers can be saved. */
1530 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1532 /* Fetch the arg pointer from the ARGUMENTS block. */
1533 incoming_args = gen_reg_rtx (Pmode);
1534 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1535 #ifndef STACK_GROWS_DOWNWARD
1536 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1537 incoming_args, 0, OPTAB_LIB_WIDEN);
1538 #endif
1540 /* Push a new argument block and copy the arguments. Do not allow
1541 the (potential) memcpy call below to interfere with our stack
1542 manipulations. */
1543 do_pending_stack_adjust ();
1544 NO_DEFER_POP;
1546 /* Save the stack with nonlocal if available. */
1547 #ifdef HAVE_save_stack_nonlocal
1548 if (HAVE_save_stack_nonlocal)
1549 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1550 else
1551 #endif
1552 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1554 /* Allocate a block of memory onto the stack and copy the memory
1555 arguments to the outgoing arguments address. We can pass TRUE
1556 as the 4th argument because we just saved the stack pointer
1557 and will restore it right after the call. */
1558 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1560 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1561 may have already set current_function_calls_alloca to true.
1562 current_function_calls_alloca won't be set if argsize is zero,
1563 so we have to guarantee need_drap is true here. */
1564 if (SUPPORTS_STACK_ALIGNMENT)
1565 crtl->need_drap = true;
1567 dest = virtual_outgoing_args_rtx;
1568 #ifndef STACK_GROWS_DOWNWARD
1569 if (CONST_INT_P (argsize))
1570 dest = plus_constant (dest, -INTVAL (argsize));
1571 else
1572 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1573 #endif
1574 dest = gen_rtx_MEM (BLKmode, dest);
1575 set_mem_align (dest, PARM_BOUNDARY);
1576 src = gen_rtx_MEM (BLKmode, incoming_args);
1577 set_mem_align (src, PARM_BOUNDARY);
1578 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1580 /* Refer to the argument block. */
1581 apply_args_size ();
1582 arguments = gen_rtx_MEM (BLKmode, arguments);
1583 set_mem_align (arguments, PARM_BOUNDARY);
1585 /* Walk past the arg-pointer and structure value address. */
1586 size = GET_MODE_SIZE (Pmode);
1587 if (struct_value)
1588 size += GET_MODE_SIZE (Pmode);
1590 /* Restore each of the registers previously saved. Make USE insns
1591 for each of these registers for use in making the call. */
1592 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1593 if ((mode = apply_args_mode[regno]) != VOIDmode)
1595 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1596 if (size % align != 0)
1597 size = CEIL (size, align) * align;
1598 reg = gen_rtx_REG (mode, regno);
1599 emit_move_insn (reg, adjust_address (arguments, mode, size));
1600 use_reg (&call_fusage, reg);
1601 size += GET_MODE_SIZE (mode);
1604 /* Restore the structure value address unless this is passed as an
1605 "invisible" first argument. */
1606 size = GET_MODE_SIZE (Pmode);
1607 if (struct_value)
1609 rtx value = gen_reg_rtx (Pmode);
1610 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1611 emit_move_insn (struct_value, value);
1612 if (REG_P (struct_value))
1613 use_reg (&call_fusage, struct_value);
1614 size += GET_MODE_SIZE (Pmode);
1617 /* All arguments and registers used for the call are set up by now! */
1618 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1620 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1621 and we don't want to load it into a register as an optimization,
1622 because prepare_call_address already did it if it should be done. */
1623 if (GET_CODE (function) != SYMBOL_REF)
1624 function = memory_address (FUNCTION_MODE, function);
1626 /* Generate the actual call instruction and save the return value. */
1627 #ifdef HAVE_untyped_call
1628 if (HAVE_untyped_call)
1629 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1630 result, result_vector (1, result)));
1631 else
1632 #endif
1633 #ifdef HAVE_call_value
1634 if (HAVE_call_value)
1636 rtx valreg = 0;
1638 /* Locate the unique return register. It is not possible to
1639 express a call that sets more than one return register using
1640 call_value; use untyped_call for that. In fact, untyped_call
1641 only needs to save the return registers in the given block. */
1642 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1643 if ((mode = apply_result_mode[regno]) != VOIDmode)
1645 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1647 valreg = gen_rtx_REG (mode, regno);
1650 emit_call_insn (GEN_CALL_VALUE (valreg,
1651 gen_rtx_MEM (FUNCTION_MODE, function),
1652 const0_rtx, NULL_RTX, const0_rtx));
1654 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1656 else
1657 #endif
1658 gcc_unreachable ();
1660 /* Find the CALL insn we just emitted, and attach the register usage
1661 information. */
1662 call_insn = last_call_insn ();
1663 add_function_usage_to (call_insn, call_fusage);
1665 /* Restore the stack. */
1666 #ifdef HAVE_save_stack_nonlocal
1667 if (HAVE_save_stack_nonlocal)
1668 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1669 else
1670 #endif
1671 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1673 OK_DEFER_POP;
1675 /* Return the address of the result block. */
1676 result = copy_addr_to_reg (XEXP (result, 0));
1677 return convert_memory_address (ptr_mode, result);
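/* Illustrative only, not part of builtins.c: __builtin_apply_args,
   __builtin_apply and __builtin_return are the user-visible faces of the
   machinery above and below.  A hedged sketch of a forwarding wrapper,
   where target_fn and the 64-byte argument-block bound are made-up values:

     double
     wrapper (double x)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }
*/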
1680 /* Perform an untyped return. */
1682 static void
1683 expand_builtin_return (rtx result)
1685 int size, align, regno;
1686 enum machine_mode mode;
1687 rtx reg;
1688 rtx call_fusage = 0;
1690 result = convert_memory_address (Pmode, result);
1692 apply_result_size ();
1693 result = gen_rtx_MEM (BLKmode, result);
1695 #ifdef HAVE_untyped_return
1696 if (HAVE_untyped_return)
1698 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1699 emit_barrier ();
1700 return;
1702 #endif
1704 /* Restore the return value and note that each value is used. */
1705 size = 0;
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_result_mode[regno]) != VOIDmode)
1709 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1710 if (size % align != 0)
1711 size = CEIL (size, align) * align;
1712 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1713 emit_move_insn (reg, adjust_address (result, mode, size));
1715 push_to_sequence (call_fusage);
1716 emit_use (reg);
1717 call_fusage = get_insns ();
1718 end_sequence ();
1719 size += GET_MODE_SIZE (mode);
1722 /* Put the USE insns before the return. */
1723 emit_insn (call_fusage);
1725 /* Return whatever values were restored by jumping directly to the end
1726 of the function. */
1727 expand_naked_return ();
1730 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1732 static enum type_class
1733 type_to_class (tree type)
1735 switch (TREE_CODE (type))
1737 case VOID_TYPE: return void_type_class;
1738 case INTEGER_TYPE: return integer_type_class;
1739 case ENUMERAL_TYPE: return enumeral_type_class;
1740 case BOOLEAN_TYPE: return boolean_type_class;
1741 case POINTER_TYPE: return pointer_type_class;
1742 case REFERENCE_TYPE: return reference_type_class;
1743 case OFFSET_TYPE: return offset_type_class;
1744 case REAL_TYPE: return real_type_class;
1745 case COMPLEX_TYPE: return complex_type_class;
1746 case FUNCTION_TYPE: return function_type_class;
1747 case METHOD_TYPE: return method_type_class;
1748 case RECORD_TYPE: return record_type_class;
1749 case UNION_TYPE:
1750 case QUAL_UNION_TYPE: return union_type_class;
1751 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1752 ? string_type_class : array_type_class);
1753 case LANG_TYPE: return lang_type_class;
1754 default: return no_type_class;
1758 /* Expand a call EXP to __builtin_classify_type. */
1760 static rtx
1761 expand_builtin_classify_type (tree exp)
1763 if (call_expr_nargs (exp))
1764 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1765 return GEN_INT (no_type_class);
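/* Illustrative only, not part of builtins.c: a minimal source-level sketch
   of the expansion above.  Each call folds to a compile-time constant from
   enum type_class in typeclass.h; here d classifies as real_type_class,
   p as pointer_type_class and 1 as integer_type_class:

     #include <stdio.h>

     int
     main (void)
     {
       double d = 0.0;
       int *p = 0;
       printf ("%d %d %d\n",
               __builtin_classify_type (d),
               __builtin_classify_type (p),
               __builtin_classify_type (1));
       return 0;
     }
*/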
1768 /* This helper macro, meant to be used in mathfn_built_in below,
1769 determines which among a set of three builtin math functions is
1770 appropriate for a given type mode. The `F' and `L' cases are
1771 automatically generated from the `double' case. */
1772 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1773 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1774 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1775 fcodel = BUILT_IN_MATHFN##L ; break;
1776 /* Similar to above, but appends _R after any F/L suffix. */
1777 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1778 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1779 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1780 fcodel = BUILT_IN_MATHFN##L_R ; break;
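/* For reference, CASE_MATHFN (BUILT_IN_SQRT) expands (modulo whitespace) to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so each line of the switch below handles the double, float and long
   double variants of one function at once.  */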
1782 /* Return the mathematical function equivalent to FN but operating directly
1783 on TYPE, if available. If IMPLICIT is true, find the function in
1784 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1785 can't do the conversion, return zero. */
1787 static tree
1788 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1790 tree const *const fn_arr
1791 = implicit ? implicit_built_in_decls : built_in_decls;
1792 enum built_in_function fcode, fcodef, fcodel;
1794 switch (fn)
1796 CASE_MATHFN (BUILT_IN_ACOS)
1797 CASE_MATHFN (BUILT_IN_ACOSH)
1798 CASE_MATHFN (BUILT_IN_ASIN)
1799 CASE_MATHFN (BUILT_IN_ASINH)
1800 CASE_MATHFN (BUILT_IN_ATAN)
1801 CASE_MATHFN (BUILT_IN_ATAN2)
1802 CASE_MATHFN (BUILT_IN_ATANH)
1803 CASE_MATHFN (BUILT_IN_CBRT)
1804 CASE_MATHFN (BUILT_IN_CEIL)
1805 CASE_MATHFN (BUILT_IN_CEXPI)
1806 CASE_MATHFN (BUILT_IN_COPYSIGN)
1807 CASE_MATHFN (BUILT_IN_COS)
1808 CASE_MATHFN (BUILT_IN_COSH)
1809 CASE_MATHFN (BUILT_IN_DREM)
1810 CASE_MATHFN (BUILT_IN_ERF)
1811 CASE_MATHFN (BUILT_IN_ERFC)
1812 CASE_MATHFN (BUILT_IN_EXP)
1813 CASE_MATHFN (BUILT_IN_EXP10)
1814 CASE_MATHFN (BUILT_IN_EXP2)
1815 CASE_MATHFN (BUILT_IN_EXPM1)
1816 CASE_MATHFN (BUILT_IN_FABS)
1817 CASE_MATHFN (BUILT_IN_FDIM)
1818 CASE_MATHFN (BUILT_IN_FLOOR)
1819 CASE_MATHFN (BUILT_IN_FMA)
1820 CASE_MATHFN (BUILT_IN_FMAX)
1821 CASE_MATHFN (BUILT_IN_FMIN)
1822 CASE_MATHFN (BUILT_IN_FMOD)
1823 CASE_MATHFN (BUILT_IN_FREXP)
1824 CASE_MATHFN (BUILT_IN_GAMMA)
1825 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1826 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1827 CASE_MATHFN (BUILT_IN_HYPOT)
1828 CASE_MATHFN (BUILT_IN_ILOGB)
1829 CASE_MATHFN (BUILT_IN_INF)
1830 CASE_MATHFN (BUILT_IN_ISINF)
1831 CASE_MATHFN (BUILT_IN_J0)
1832 CASE_MATHFN (BUILT_IN_J1)
1833 CASE_MATHFN (BUILT_IN_JN)
1834 CASE_MATHFN (BUILT_IN_LCEIL)
1835 CASE_MATHFN (BUILT_IN_LDEXP)
1836 CASE_MATHFN (BUILT_IN_LFLOOR)
1837 CASE_MATHFN (BUILT_IN_LGAMMA)
1838 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1839 CASE_MATHFN (BUILT_IN_LLCEIL)
1840 CASE_MATHFN (BUILT_IN_LLFLOOR)
1841 CASE_MATHFN (BUILT_IN_LLRINT)
1842 CASE_MATHFN (BUILT_IN_LLROUND)
1843 CASE_MATHFN (BUILT_IN_LOG)
1844 CASE_MATHFN (BUILT_IN_LOG10)
1845 CASE_MATHFN (BUILT_IN_LOG1P)
1846 CASE_MATHFN (BUILT_IN_LOG2)
1847 CASE_MATHFN (BUILT_IN_LOGB)
1848 CASE_MATHFN (BUILT_IN_LRINT)
1849 CASE_MATHFN (BUILT_IN_LROUND)
1850 CASE_MATHFN (BUILT_IN_MODF)
1851 CASE_MATHFN (BUILT_IN_NAN)
1852 CASE_MATHFN (BUILT_IN_NANS)
1853 CASE_MATHFN (BUILT_IN_NEARBYINT)
1854 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1855 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1856 CASE_MATHFN (BUILT_IN_POW)
1857 CASE_MATHFN (BUILT_IN_POWI)
1858 CASE_MATHFN (BUILT_IN_POW10)
1859 CASE_MATHFN (BUILT_IN_REMAINDER)
1860 CASE_MATHFN (BUILT_IN_REMQUO)
1861 CASE_MATHFN (BUILT_IN_RINT)
1862 CASE_MATHFN (BUILT_IN_ROUND)
1863 CASE_MATHFN (BUILT_IN_SCALB)
1864 CASE_MATHFN (BUILT_IN_SCALBLN)
1865 CASE_MATHFN (BUILT_IN_SCALBN)
1866 CASE_MATHFN (BUILT_IN_SIGNBIT)
1867 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1868 CASE_MATHFN (BUILT_IN_SIN)
1869 CASE_MATHFN (BUILT_IN_SINCOS)
1870 CASE_MATHFN (BUILT_IN_SINH)
1871 CASE_MATHFN (BUILT_IN_SQRT)
1872 CASE_MATHFN (BUILT_IN_TAN)
1873 CASE_MATHFN (BUILT_IN_TANH)
1874 CASE_MATHFN (BUILT_IN_TGAMMA)
1875 CASE_MATHFN (BUILT_IN_TRUNC)
1876 CASE_MATHFN (BUILT_IN_Y0)
1877 CASE_MATHFN (BUILT_IN_Y1)
1878 CASE_MATHFN (BUILT_IN_YN)
1880 default:
1881 return NULL_TREE;
1884 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1885 return fn_arr[fcode];
1886 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1887 return fn_arr[fcodef];
1888 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1889 return fn_arr[fcodel];
1890 else
1891 return NULL_TREE;
1894 /* Like mathfn_built_in_1(), but always use the implicit array. */
1896 tree
1897 mathfn_built_in (tree type, enum built_in_function fn)
1899 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
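/* Usage sketch (illustrative): a caller holding an argument's type and the
   double-precision function code can look up the matching decl with

     tree fn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_SIN);

   which yields implicit_built_in_decls[BUILT_IN_SINL] when the type is
   long double, and NULL_TREE when no suitable variant is available.  */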
1902 /* If errno must be maintained, expand the RTL to check if the result,
1903 TARGET, of a built-in function call, EXP, is NaN, and if so set
1904 errno to EDOM. */
1906 static void
1907 expand_errno_check (tree exp, rtx target)
1909 rtx lab = gen_label_rtx ();
1911 /* Test the result; if it is NaN, set errno=EDOM because
1912 the argument was not in the domain. */
1913 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1914 NULL_RTX, NULL_RTX, lab,
1915 /* The jump is very likely. */
1916 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1918 #ifdef TARGET_EDOM
1919 /* If this built-in doesn't throw an exception, set errno directly. */
1920 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1922 #ifdef GEN_ERRNO_RTX
1923 rtx errno_rtx = GEN_ERRNO_RTX;
1924 #else
1925 rtx errno_rtx
1926 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1927 #endif
1928 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1929 emit_label (lab);
1930 return;
1932 #endif
1934 /* Make sure the library call isn't expanded as a tail call. */
1935 CALL_EXPR_TAILCALL (exp) = 0;
1937 /* We can't set errno=EDOM directly; let the library call do it.
1938 Pop the arguments right away in case the call gets deleted. */
1939 NO_DEFER_POP;
1940 expand_call (exp, target, 0);
1941 OK_DEFER_POP;
1942 emit_label (lab);
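/* Illustrative only: on a target that defines TARGET_EDOM the RTL emitted
   above behaves roughly like this source-level sketch (a plain "errno"
   symbol is assumed; GEN_ERRNO_RTX overrides that when provided):

     double r = sqrt (x);
     if (r != r)
       errno = EDOM;

   i.e. a NaN result means the argument was outside the domain, so errno is
   set without re-issuing the library call.  */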
1945 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1946 Return NULL_RTX if a normal call should be emitted rather than expanding
1947 the function in-line. EXP is the expression that is a call to the builtin
1948 function; if convenient, the result should be placed in TARGET.
1949 SUBTARGET may be used as the target for computing one of EXP's operands. */
1951 static rtx
1952 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1954 optab builtin_optab;
1955 rtx op0, insns;
1956 tree fndecl = get_callee_fndecl (exp);
1957 enum machine_mode mode;
1958 bool errno_set = false;
1959 tree arg;
1961 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1962 return NULL_RTX;
1964 arg = CALL_EXPR_ARG (exp, 0);
1966 switch (DECL_FUNCTION_CODE (fndecl))
1968 CASE_FLT_FN (BUILT_IN_SQRT):
1969 errno_set = ! tree_expr_nonnegative_p (arg);
1970 builtin_optab = sqrt_optab;
1971 break;
1972 CASE_FLT_FN (BUILT_IN_EXP):
1973 errno_set = true; builtin_optab = exp_optab; break;
1974 CASE_FLT_FN (BUILT_IN_EXP10):
1975 CASE_FLT_FN (BUILT_IN_POW10):
1976 errno_set = true; builtin_optab = exp10_optab; break;
1977 CASE_FLT_FN (BUILT_IN_EXP2):
1978 errno_set = true; builtin_optab = exp2_optab; break;
1979 CASE_FLT_FN (BUILT_IN_EXPM1):
1980 errno_set = true; builtin_optab = expm1_optab; break;
1981 CASE_FLT_FN (BUILT_IN_LOGB):
1982 errno_set = true; builtin_optab = logb_optab; break;
1983 CASE_FLT_FN (BUILT_IN_LOG):
1984 errno_set = true; builtin_optab = log_optab; break;
1985 CASE_FLT_FN (BUILT_IN_LOG10):
1986 errno_set = true; builtin_optab = log10_optab; break;
1987 CASE_FLT_FN (BUILT_IN_LOG2):
1988 errno_set = true; builtin_optab = log2_optab; break;
1989 CASE_FLT_FN (BUILT_IN_LOG1P):
1990 errno_set = true; builtin_optab = log1p_optab; break;
1991 CASE_FLT_FN (BUILT_IN_ASIN):
1992 builtin_optab = asin_optab; break;
1993 CASE_FLT_FN (BUILT_IN_ACOS):
1994 builtin_optab = acos_optab; break;
1995 CASE_FLT_FN (BUILT_IN_TAN):
1996 builtin_optab = tan_optab; break;
1997 CASE_FLT_FN (BUILT_IN_ATAN):
1998 builtin_optab = atan_optab; break;
1999 CASE_FLT_FN (BUILT_IN_FLOOR):
2000 builtin_optab = floor_optab; break;
2001 CASE_FLT_FN (BUILT_IN_CEIL):
2002 builtin_optab = ceil_optab; break;
2003 CASE_FLT_FN (BUILT_IN_TRUNC):
2004 builtin_optab = btrunc_optab; break;
2005 CASE_FLT_FN (BUILT_IN_ROUND):
2006 builtin_optab = round_optab; break;
2007 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2008 builtin_optab = nearbyint_optab;
2009 if (flag_trapping_math)
2010 break;
2011 /* Else fall through and expand as rint. */
2012 CASE_FLT_FN (BUILT_IN_RINT):
2013 builtin_optab = rint_optab; break;
2014 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2015 builtin_optab = significand_optab; break;
2016 default:
2017 gcc_unreachable ();
2020 /* Make a suitable register to place result in. */
2021 mode = TYPE_MODE (TREE_TYPE (exp));
2023 if (! flag_errno_math || ! HONOR_NANS (mode))
2024 errno_set = false;
2026 /* Before working hard, check whether the instruction is available. */
2027 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2028 && (!errno_set || !optimize_insn_for_size_p ()))
2030 target = gen_reg_rtx (mode);
2032 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2033 need to expand the argument again. This way, we will not perform
2034 side-effects more than once. */
2035 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2037 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2039 start_sequence ();
2041 /* Compute into TARGET.
2042 Set TARGET to wherever the result comes back. */
2043 target = expand_unop (mode, builtin_optab, op0, target, 0);
2045 if (target != 0)
2047 if (errno_set)
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
2052 end_sequence ();
2053 emit_insn (insns);
2054 return target;
2057 /* If we were unable to expand via the builtin, stop the sequence
2058 (without outputting the insns) and call to the library function
2059 with the stabilized argument list. */
2060 end_sequence ();
2063 return expand_call (exp, target, target == const0_rtx);
2066 /* Expand a call to the builtin binary math functions (pow and atan2).
2067 Return NULL_RTX if a normal call should be emitted rather than expanding the
2068 function in-line. EXP is the expression that is a call to the builtin
2069 function; if convenient, the result should be placed in TARGET.
2070 SUBTARGET may be used as the target for computing one of EXP's
2071 operands. */
2073 static rtx
2074 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2076 optab builtin_optab;
2077 rtx op0, op1, insns;
2078 int op1_type = REAL_TYPE;
2079 tree fndecl = get_callee_fndecl (exp);
2080 tree arg0, arg1;
2081 enum machine_mode mode;
2082 bool errno_set = true;
2084 switch (DECL_FUNCTION_CODE (fndecl))
2086 CASE_FLT_FN (BUILT_IN_SCALBN):
2087 CASE_FLT_FN (BUILT_IN_SCALBLN):
2088 CASE_FLT_FN (BUILT_IN_LDEXP):
2089 op1_type = INTEGER_TYPE;
2090 default:
2091 break;
2094 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2095 return NULL_RTX;
2097 arg0 = CALL_EXPR_ARG (exp, 0);
2098 arg1 = CALL_EXPR_ARG (exp, 1);
2100 switch (DECL_FUNCTION_CODE (fndecl))
2102 CASE_FLT_FN (BUILT_IN_POW):
2103 builtin_optab = pow_optab; break;
2104 CASE_FLT_FN (BUILT_IN_ATAN2):
2105 builtin_optab = atan2_optab; break;
2106 CASE_FLT_FN (BUILT_IN_SCALB):
2107 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2108 return 0;
2109 builtin_optab = scalb_optab; break;
2110 CASE_FLT_FN (BUILT_IN_SCALBN):
2111 CASE_FLT_FN (BUILT_IN_SCALBLN):
2112 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2113 return 0;
2114 /* Fall through... */
2115 CASE_FLT_FN (BUILT_IN_LDEXP):
2116 builtin_optab = ldexp_optab; break;
2117 CASE_FLT_FN (BUILT_IN_FMOD):
2118 builtin_optab = fmod_optab; break;
2119 CASE_FLT_FN (BUILT_IN_REMAINDER):
2120 CASE_FLT_FN (BUILT_IN_DREM):
2121 builtin_optab = remainder_optab; break;
2122 default:
2123 gcc_unreachable ();
2126 /* Make a suitable register to place result in. */
2127 mode = TYPE_MODE (TREE_TYPE (exp));
2129 /* Before working hard, check whether the instruction is available. */
2130 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2131 return NULL_RTX;
2133 target = gen_reg_rtx (mode);
2135 if (! flag_errno_math || ! HONOR_NANS (mode))
2136 errno_set = false;
2138 if (errno_set && optimize_insn_for_size_p ())
2139 return 0;
2141 /* Always stabilize the argument list. */
2142 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2143 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2145 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2146 op1 = expand_normal (arg1);
2148 start_sequence ();
2150 /* Compute into TARGET.
2151 Set TARGET to wherever the result comes back. */
2152 target = expand_binop (mode, builtin_optab, op0, op1,
2153 target, 0, OPTAB_DIRECT);
2155 /* If we were unable to expand via the builtin, stop the sequence
2156 (without outputting the insns) and call to the library function
2157 with the stabilized argument list. */
2158 if (target == 0)
2160 end_sequence ();
2161 return expand_call (exp, target, target == const0_rtx);
2164 if (errno_set)
2165 expand_errno_check (exp, target);
2167 /* Output the entire sequence. */
2168 insns = get_insns ();
2169 end_sequence ();
2170 emit_insn (insns);
2172 return target;
2175 /* Expand a call to the builtin ternary math functions (fma).
2176 Return NULL_RTX if a normal call should be emitted rather than expanding the
2177 function in-line. EXP is the expression that is a call to the builtin
2178 function; if convenient, the result should be placed in TARGET.
2179 SUBTARGET may be used as the target for computing one of EXP's
2180 operands. */
2182 static rtx
2183 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2185 optab builtin_optab;
2186 rtx op0, op1, op2, insns;
2187 tree fndecl = get_callee_fndecl (exp);
2188 tree arg0, arg1, arg2;
2189 enum machine_mode mode;
2191 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2192 return NULL_RTX;
2194 arg0 = CALL_EXPR_ARG (exp, 0);
2195 arg1 = CALL_EXPR_ARG (exp, 1);
2196 arg2 = CALL_EXPR_ARG (exp, 2);
2198 switch (DECL_FUNCTION_CODE (fndecl))
2200 CASE_FLT_FN (BUILT_IN_FMA):
2201 builtin_optab = fma_optab; break;
2202 default:
2203 gcc_unreachable ();
2206 /* Make a suitable register to place result in. */
2207 mode = TYPE_MODE (TREE_TYPE (exp));
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2211 return NULL_RTX;
2213 target = gen_reg_rtx (mode);
2215 /* Always stabilize the argument list. */
2216 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2217 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2218 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2220 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2221 op1 = expand_normal (arg1);
2222 op2 = expand_normal (arg2);
2224 start_sequence ();
2226 /* Compute into TARGET.
2227 Set TARGET to wherever the result comes back. */
2228 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2229 target, 0);
2231 /* If we were unable to expand via the builtin, stop the sequence
2232 (without outputting the insns) and call to the library function
2233 with the stabilized argument list. */
2234 if (target == 0)
2236 end_sequence ();
2237 return expand_call (exp, target, target == const0_rtx);
2240 /* Output the entire sequence. */
2241 insns = get_insns ();
2242 end_sequence ();
2243 emit_insn (insns);
2245 return target;
2248 /* Expand a call to the builtin sin and cos math functions.
2249 Return NULL_RTX if a normal call should be emitted rather than expanding the
2250 function in-line. EXP is the expression that is a call to the builtin
2251 function; if convenient, the result should be placed in TARGET.
2252 SUBTARGET may be used as the target for computing one of EXP's
2253 operands. */
2255 static rtx
2256 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2258 optab builtin_optab;
2259 rtx op0, insns;
2260 tree fndecl = get_callee_fndecl (exp);
2261 enum machine_mode mode;
2262 tree arg;
2264 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2267 arg = CALL_EXPR_ARG (exp, 0);
2269 switch (DECL_FUNCTION_CODE (fndecl))
2271 CASE_FLT_FN (BUILT_IN_SIN):
2272 CASE_FLT_FN (BUILT_IN_COS):
2273 builtin_optab = sincos_optab; break;
2274 default:
2275 gcc_unreachable ();
2278 /* Make a suitable register to place result in. */
2279 mode = TYPE_MODE (TREE_TYPE (exp));
2281 /* Check if the sincos insn is available, otherwise fall back
2282 to the sin or cos insn. */
2283 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2284 switch (DECL_FUNCTION_CODE (fndecl))
2286 CASE_FLT_FN (BUILT_IN_SIN):
2287 builtin_optab = sin_optab; break;
2288 CASE_FLT_FN (BUILT_IN_COS):
2289 builtin_optab = cos_optab; break;
2290 default:
2291 gcc_unreachable ();
2294 /* Before working hard, check whether the instruction is available. */
2295 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2297 target = gen_reg_rtx (mode);
2299 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2300 need to expand the argument again. This way, we will not perform
2301 side-effects more than once. */
2302 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2304 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2306 start_sequence ();
2308 /* Compute into TARGET.
2309 Set TARGET to wherever the result comes back. */
2310 if (builtin_optab == sincos_optab)
2312 int result;
2314 switch (DECL_FUNCTION_CODE (fndecl))
2316 CASE_FLT_FN (BUILT_IN_SIN):
2317 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2318 break;
2319 CASE_FLT_FN (BUILT_IN_COS):
2320 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2321 break;
2322 default:
2323 gcc_unreachable ();
2325 gcc_assert (result);
2327 else
2329 target = expand_unop (mode, builtin_optab, op0, target, 0);
2332 if (target != 0)
2334 /* Output the entire sequence. */
2335 insns = get_insns ();
2336 end_sequence ();
2337 emit_insn (insns);
2338 return target;
2341 /* If we were unable to expand via the builtin, stop the sequence
2342 (without outputting the insns) and call to the library function
2343 with the stabilized argument list. */
2344 end_sequence ();
2347 target = expand_call (exp, target, target == const0_rtx);
2349 return target;
2352 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2353 return an RTL instruction code that implements the functionality.
2354 If that isn't possible or available, return CODE_FOR_nothing. */
2356 static enum insn_code
2357 interclass_mathfn_icode (tree arg, tree fndecl)
2359 bool errno_set = false;
2360 optab builtin_optab = 0;
2361 enum machine_mode mode;
2363 switch (DECL_FUNCTION_CODE (fndecl))
2365 CASE_FLT_FN (BUILT_IN_ILOGB):
2366 errno_set = true; builtin_optab = ilogb_optab; break;
2367 CASE_FLT_FN (BUILT_IN_ISINF):
2368 builtin_optab = isinf_optab; break;
2369 case BUILT_IN_ISNORMAL:
2370 case BUILT_IN_ISFINITE:
2371 CASE_FLT_FN (BUILT_IN_FINITE):
2372 case BUILT_IN_FINITED32:
2373 case BUILT_IN_FINITED64:
2374 case BUILT_IN_FINITED128:
2375 case BUILT_IN_ISINFD32:
2376 case BUILT_IN_ISINFD64:
2377 case BUILT_IN_ISINFD128:
2378 /* These builtins have no optabs (yet). */
2379 break;
2380 default:
2381 gcc_unreachable ();
2384 /* There's no easy way to detect the case where we need to set EDOM. */
2385 if (flag_errno_math && errno_set)
2386 return CODE_FOR_nothing;
2388 /* Optab mode depends on the mode of the input argument. */
2389 mode = TYPE_MODE (TREE_TYPE (arg));
2391 if (builtin_optab)
2392 return optab_handler (builtin_optab, mode);
2393 return CODE_FOR_nothing;
2396 /* Expand a call to one of the builtin math functions that operate on
2397 a floating point argument and output an integer result (ilogb, isinf,
2398 isnan, etc).
2399 Return 0 if a normal call should be emitted rather than expanding the
2400 function in-line. EXP is the expression that is a call to the builtin
2401 function; if convenient, the result should be placed in TARGET. */
2403 static rtx
2404 expand_builtin_interclass_mathfn (tree exp, rtx target)
2406 enum insn_code icode = CODE_FOR_nothing;
2407 rtx op0;
2408 tree fndecl = get_callee_fndecl (exp);
2409 enum machine_mode mode;
2410 tree arg;
2412 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2413 return NULL_RTX;
2415 arg = CALL_EXPR_ARG (exp, 0);
2416 icode = interclass_mathfn_icode (arg, fndecl);
2417 mode = TYPE_MODE (TREE_TYPE (arg));
2419 if (icode != CODE_FOR_nothing)
2421 struct expand_operand ops[1];
2422 rtx last = get_last_insn ();
2423 tree orig_arg = arg;
2425 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2426 need to expand the argument again. This way, we will not perform
2427 side-effects more than once. */
2428 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2430 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2432 if (mode != GET_MODE (op0))
2433 op0 = convert_to_mode (mode, op0, 0);
2435 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2436 if (maybe_legitimize_operands (icode, 0, 1, ops)
2437 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2438 return ops[0].value;
2440 delete_insns_since (last);
2441 CALL_EXPR_ARG (exp, 0) = orig_arg;
2444 return NULL_RTX;
2447 /* Expand a call to the builtin sincos math function.
2448 Return NULL_RTX if a normal call should be emitted rather than expanding the
2449 function in-line. EXP is the expression that is a call to the builtin
2450 function. */
2452 static rtx
2453 expand_builtin_sincos (tree exp)
2455 rtx op0, op1, op2, target1, target2;
2456 enum machine_mode mode;
2457 tree arg, sinp, cosp;
2458 int result;
2459 location_t loc = EXPR_LOCATION (exp);
2460 tree alias_type, alias_off;
2462 if (!validate_arglist (exp, REAL_TYPE,
2463 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2464 return NULL_RTX;
2466 arg = CALL_EXPR_ARG (exp, 0);
2467 sinp = CALL_EXPR_ARG (exp, 1);
2468 cosp = CALL_EXPR_ARG (exp, 2);
2470 /* Make a suitable register to place result in. */
2471 mode = TYPE_MODE (TREE_TYPE (arg));
2473 /* Check if sincos insn is available, otherwise emit the call. */
2474 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2475 return NULL_RTX;
2477 target1 = gen_reg_rtx (mode);
2478 target2 = gen_reg_rtx (mode);
2480 op0 = expand_normal (arg);
2481 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2482 alias_off = build_int_cst (alias_type, 0);
2483 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2484 sinp, alias_off));
2485 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2486 cosp, alias_off));
2488 /* Compute into target1 and target2.
2489 Set TARGET to wherever the result comes back. */
2490 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2491 gcc_assert (result);
2493 /* Move target1 and target2 to the memory locations indicated
2494 by op1 and op2. */
2495 emit_move_insn (op1, target1);
2496 emit_move_insn (op2, target2);
2498 return const0_rtx;
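/* Usage sketch (illustrative; sincos is a GNU extension, so _GNU_SOURCE is
   needed for the declaration in <math.h>):

     #define _GNU_SOURCE
     #include <math.h>

     void
     polar_to_cart (double r, double t, double *x, double *y)
     {
       double s, c;
       sincos (t, &s, &c);
       *x = r * c;
       *y = r * s;
     }

   When the target provides a sincos insn (sincos_optab), the call is
   expanded in-line by the code above; otherwise the libcall remains.  */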
2501 /* Expand a call to the internal cexpi builtin to the sincos math function.
2502 EXP is the expression that is a call to the builtin function; if convenient,
2503 the result should be placed in TARGET. */
2505 static rtx
2506 expand_builtin_cexpi (tree exp, rtx target)
2508 tree fndecl = get_callee_fndecl (exp);
2509 tree arg, type;
2510 enum machine_mode mode;
2511 rtx op0, op1, op2;
2512 location_t loc = EXPR_LOCATION (exp);
2514 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2515 return NULL_RTX;
2517 arg = CALL_EXPR_ARG (exp, 0);
2518 type = TREE_TYPE (arg);
2519 mode = TYPE_MODE (TREE_TYPE (arg));
2521 /* Try expanding via a sincos optab, fall back to emitting a libcall
2522 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2523 is only generated from sincos or cexp, or when either of them is available. */
2524 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2526 op1 = gen_reg_rtx (mode);
2527 op2 = gen_reg_rtx (mode);
2529 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2531 /* Compute into op1 and op2. */
2532 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2534 else if (TARGET_HAS_SINCOS)
2536 tree call, fn = NULL_TREE;
2537 tree top1, top2;
2538 rtx op1a, op2a;
2540 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2541 fn = built_in_decls[BUILT_IN_SINCOSF];
2542 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2543 fn = built_in_decls[BUILT_IN_SINCOS];
2544 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2545 fn = built_in_decls[BUILT_IN_SINCOSL];
2546 else
2547 gcc_unreachable ();
2549 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2550 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2551 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2552 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2553 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2554 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2556 /* Make sure not to fold the sincos call again. */
2557 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2558 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2559 call, 3, arg, top1, top2));
2561 else
2563 tree call, fn = NULL_TREE, narg;
2564 tree ctype = build_complex_type (type);
2566 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2567 fn = built_in_decls[BUILT_IN_CEXPF];
2568 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2569 fn = built_in_decls[BUILT_IN_CEXP];
2570 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2571 fn = built_in_decls[BUILT_IN_CEXPL];
2572 else
2573 gcc_unreachable ();
2575 /* If we don't have a decl for cexp, create one. This is the
2576 friendliest fallback if the user calls __builtin_cexpi
2577 without full C99 function support on the target. */
2578 if (fn == NULL_TREE)
2580 tree fntype;
2581 const char *name = NULL;
2583 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2584 name = "cexpf";
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2586 name = "cexp";
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2588 name = "cexpl";
2590 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2591 fn = build_fn_decl (name, fntype);
2594 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2595 build_real (type, dconst0), arg);
2597 /* Make sure not to fold the cexp call again. */
2598 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2599 return expand_expr (build_call_nary (ctype, call, 1, narg),
2600 target, VOIDmode, EXPAND_NORMAL);
2603 /* Now build the proper return type. */
2604 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2605 make_tree (TREE_TYPE (arg), op2),
2606 make_tree (TREE_TYPE (arg), op1)),
2607 target, VOIDmode, EXPAND_NORMAL);
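/* Fallback sketch (illustrative): when neither a sincos insn nor target
   sincos support is available, the code above in effect rewrites

     __builtin_cexpi (x)   into   cexp (0.0 + x * I)

   building the pure-imaginary argument with a COMPLEX_EXPR and letting the
   cexp libcall deliver cos (x) + i*sin (x).  */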
2610 /* Conveniently construct a function call expression. FNDECL names the
2611 function to be called, N is the number of arguments, and the "..."
2612 parameters are the argument expressions. Unlike build_call_expr
2613 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2615 static tree
2616 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2618 va_list ap;
2619 tree fntype = TREE_TYPE (fndecl);
2620 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2622 va_start (ap, n);
2623 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2624 va_end (ap);
2625 SET_EXPR_LOCATION (fn, loc);
2626 return fn;
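/* Usage sketch (illustrative, mirroring calls made later in this file):
   building an unfolded strcpy call looks like

     tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
     tree call = build_call_nofold_loc (loc, fn, 2, dst, src);

   and the result is always a CALL_EXPR, never a folded form.  */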
2629 /* Expand a call to one of the builtin rounding functions gcc defines
2630 as an extension (lfloor and lceil). As these are gcc extensions we
2631 do not need to worry about setting errno to EDOM.
2632 If expanding via optab fails, lower the expression to (int)(floor(x)).
2633 EXP is the expression that is a call to the builtin function;
2634 if convenient, the result should be placed in TARGET. */
2636 static rtx
2637 expand_builtin_int_roundingfn (tree exp, rtx target)
2639 convert_optab builtin_optab;
2640 rtx op0, insns, tmp;
2641 tree fndecl = get_callee_fndecl (exp);
2642 enum built_in_function fallback_fn;
2643 tree fallback_fndecl;
2644 enum machine_mode mode;
2645 tree arg;
2647 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2648 gcc_unreachable ();
2650 arg = CALL_EXPR_ARG (exp, 0);
2652 switch (DECL_FUNCTION_CODE (fndecl))
2654 CASE_FLT_FN (BUILT_IN_LCEIL):
2655 CASE_FLT_FN (BUILT_IN_LLCEIL):
2656 builtin_optab = lceil_optab;
2657 fallback_fn = BUILT_IN_CEIL;
2658 break;
2660 CASE_FLT_FN (BUILT_IN_LFLOOR):
2661 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2662 builtin_optab = lfloor_optab;
2663 fallback_fn = BUILT_IN_FLOOR;
2664 break;
2666 default:
2667 gcc_unreachable ();
2670 /* Make a suitable register to place result in. */
2671 mode = TYPE_MODE (TREE_TYPE (exp));
2673 target = gen_reg_rtx (mode);
2675 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2676 need to expand the argument again. This way, we will not perform
2677 side-effects more than once. */
2678 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2680 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2682 start_sequence ();
2684 /* Compute into TARGET. */
2685 if (expand_sfix_optab (target, op0, builtin_optab))
2687 /* Output the entire sequence. */
2688 insns = get_insns ();
2689 end_sequence ();
2690 emit_insn (insns);
2691 return target;
2694 /* If we were unable to expand via the builtin, stop the sequence
2695 (without outputting the insns). */
2696 end_sequence ();
2698 /* Fall back to floating point rounding optab. */
2699 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2701 /* For non-C99 targets we may end up without a fallback fndecl here
2702 if the user called __builtin_lfloor directly. In this case emit
2703 a call to the floor/ceil variants nevertheless. This should result
2704 in the best user experience on targets without full C99 support. */
2705 if (fallback_fndecl == NULL_TREE)
2707 tree fntype;
2708 const char *name = NULL;
2710 switch (DECL_FUNCTION_CODE (fndecl))
2712 case BUILT_IN_LCEIL:
2713 case BUILT_IN_LLCEIL:
2714 name = "ceil";
2715 break;
2716 case BUILT_IN_LCEILF:
2717 case BUILT_IN_LLCEILF:
2718 name = "ceilf";
2719 break;
2720 case BUILT_IN_LCEILL:
2721 case BUILT_IN_LLCEILL:
2722 name = "ceill";
2723 break;
2724 case BUILT_IN_LFLOOR:
2725 case BUILT_IN_LLFLOOR:
2726 name = "floor";
2727 break;
2728 case BUILT_IN_LFLOORF:
2729 case BUILT_IN_LLFLOORF:
2730 name = "floorf";
2731 break;
2732 case BUILT_IN_LFLOORL:
2733 case BUILT_IN_LLFLOORL:
2734 name = "floorl";
2735 break;
2736 default:
2737 gcc_unreachable ();
2740 fntype = build_function_type_list (TREE_TYPE (arg),
2741 TREE_TYPE (arg), NULL_TREE);
2742 fallback_fndecl = build_fn_decl (name, fntype);
2745 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2747 tmp = expand_normal (exp);
2749 /* Truncate the result of floating point optab to integer
2750 via expand_fix (). */
2751 target = gen_reg_rtx (mode);
2752 expand_fix (target, tmp, 0);
2754 return target;
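/* Illustrative only: on a target without an lfloor/lceil insn the code
   above amounts to the source-level rewrite

     long l = __builtin_lfloor (x);   becoming   long l = (long) floor (x);

   with the final conversion performed by expand_fix rather than by a
   second call.  */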
2757 /* Expand a call to one of the builtin math functions doing integer
2758 conversion (lrint).
2759 Return 0 if a normal call should be emitted rather than expanding the
2760 function in-line. EXP is the expression that is a call to the builtin
2761 function; if convenient, the result should be placed in TARGET. */
2763 static rtx
2764 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2766 convert_optab builtin_optab;
2767 rtx op0, insns;
2768 tree fndecl = get_callee_fndecl (exp);
2769 tree arg;
2770 enum machine_mode mode;
2772 /* There's no easy way to detect the case where we need to set EDOM. */
2773 if (flag_errno_math)
2774 return NULL_RTX;
2776 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2777 gcc_unreachable ();
2779 arg = CALL_EXPR_ARG (exp, 0);
2781 switch (DECL_FUNCTION_CODE (fndecl))
2783 CASE_FLT_FN (BUILT_IN_LRINT):
2784 CASE_FLT_FN (BUILT_IN_LLRINT):
2785 builtin_optab = lrint_optab; break;
2786 CASE_FLT_FN (BUILT_IN_LROUND):
2787 CASE_FLT_FN (BUILT_IN_LLROUND):
2788 builtin_optab = lround_optab; break;
2789 default:
2790 gcc_unreachable ();
2793 /* Make a suitable register to place result in. */
2794 mode = TYPE_MODE (TREE_TYPE (exp));
2796 target = gen_reg_rtx (mode);
2798 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2799 need to expand the argument again. This way, we will not perform
2800 side-effects more than once. */
2801 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2803 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2805 start_sequence ();
2807 if (expand_sfix_optab (target, op0, builtin_optab))
2809 /* Output the entire sequence. */
2810 insns = get_insns ();
2811 end_sequence ();
2812 emit_insn (insns);
2813 return target;
2816 /* If we were unable to expand via the builtin, stop the sequence
2817 (without outputting the insns) and call to the library function
2818 with the stabilized argument list. */
2819 end_sequence ();
2821 target = expand_call (exp, target, target == const0_rtx);
2823 return target;
2826 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2827 a normal call should be emitted rather than expanding the function
2828 in-line. EXP is the expression that is a call to the builtin
2829 function; if convenient, the result should be placed in TARGET. */
2831 static rtx
2832 expand_builtin_powi (tree exp, rtx target)
2834 tree arg0, arg1;
2835 rtx op0, op1;
2836 enum machine_mode mode;
2837 enum machine_mode mode2;
2839 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2840 return NULL_RTX;
2842 arg0 = CALL_EXPR_ARG (exp, 0);
2843 arg1 = CALL_EXPR_ARG (exp, 1);
2844 mode = TYPE_MODE (TREE_TYPE (exp));
2846 /* Emit a libcall to libgcc. */
2848 /* Mode of the 2nd argument must match that of an int. */
2849 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2851 if (target == NULL_RTX)
2852 target = gen_reg_rtx (mode);
2854 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2855 if (GET_MODE (op0) != mode)
2856 op0 = convert_to_mode (mode, op0, 0);
2857 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2858 if (GET_MODE (op1) != mode2)
2859 op1 = convert_to_mode (mode2, op1, 0);
2861 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2862 target, LCT_CONST, mode, 2,
2863 op0, mode, op1, mode2);
2865 return target;
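/* Illustrative only: __builtin_powi is always expanded as a libcall here,
   so for DFmode

     double y = __builtin_powi (x, n);

   becomes a call to the libgcc helper returned by
   optab_libfunc (powi_optab, DFmode) (conventionally __powidf2), with N
   first converted to the mode of an int.  */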
2868 /* Expand expression EXP, which is a call to the strlen builtin. Return
2869 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2870 try to get the result in TARGET, if convenient. */
2872 static rtx
2873 expand_builtin_strlen (tree exp, rtx target,
2874 enum machine_mode target_mode)
2876 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2877 return NULL_RTX;
2878 else
2880 struct expand_operand ops[4];
2881 rtx pat;
2882 tree len;
2883 tree src = CALL_EXPR_ARG (exp, 0);
2884 rtx src_reg, before_strlen;
2885 enum machine_mode insn_mode = target_mode;
2886 enum insn_code icode = CODE_FOR_nothing;
2887 unsigned int align;
2889 /* If the length can be computed at compile-time, return it. */
2890 len = c_strlen (src, 0);
2891 if (len)
2892 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2894 /* If the length can be computed at compile-time and is constant
2895 integer, but there are side-effects in src, evaluate
2896 src for side-effects, then return len.
2897 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2898 can be optimized into: i++; x = 3; */
2899 len = c_strlen (src, 1);
2900 if (len && TREE_CODE (len) == INTEGER_CST)
2902 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2903 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2906 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2908 /* If SRC is not a pointer type, don't do this operation inline. */
2909 if (align == 0)
2910 return NULL_RTX;
2912 /* Bail out if we can't compute strlen in the right mode. */
2913 while (insn_mode != VOIDmode)
2915 icode = optab_handler (strlen_optab, insn_mode);
2916 if (icode != CODE_FOR_nothing)
2917 break;
2919 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2921 if (insn_mode == VOIDmode)
2922 return NULL_RTX;
2924 /* Make a place to hold the source address. We will not expand
2925 the actual source until we are sure that the expansion will
2926 not fail -- there are trees that cannot be expanded twice. */
2927 src_reg = gen_reg_rtx (Pmode);
2929 /* Mark the beginning of the strlen sequence so we can emit the
2930 source operand later. */
2931 before_strlen = get_last_insn ();
2933 create_output_operand (&ops[0], target, insn_mode);
2934 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2935 create_integer_operand (&ops[2], 0);
2936 create_integer_operand (&ops[3], align);
2937 if (!maybe_expand_insn (icode, 4, ops))
2938 return NULL_RTX;
2940 /* Now that we are assured of success, expand the source. */
2941 start_sequence ();
2942 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2943 if (pat != src_reg)
2944 emit_move_insn (src_reg, pat);
2945 pat = get_insns ();
2946 end_sequence ();
2948 if (before_strlen)
2949 emit_insn_after (pat, before_strlen);
2950 else
2951 emit_insn_before (pat, get_insns ());
2953 /* Return the value in the proper mode for this function. */
2954 if (GET_MODE (ops[0].value) == target_mode)
2955 target = ops[0].value;
2956 else if (target != 0)
2957 convert_move (target, ops[0].value, 0);
2958 else
2959 target = convert_to_mode (target_mode, ops[0].value, 0);
2961 return target;
2965 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2966 bytes from constant string DATA + OFFSET and return it as target
2967 constant. */
2969 static rtx
2970 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2971 enum machine_mode mode)
2973 const char *str = (const char *) data;
2975 gcc_assert (offset >= 0
2976 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2977 <= strlen (str) + 1));
2979 return c_readstr (str + offset, mode);
2982 /* Expand a call EXP to the memcpy builtin.
2983 Return NULL_RTX if we failed; the caller should emit a normal call,
2984 otherwise try to get the result in TARGET, if convenient (and in
2985 mode MODE if that's convenient). */
2987 static rtx
2988 expand_builtin_memcpy (tree exp, rtx target)
2990 if (!validate_arglist (exp,
2991 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2992 return NULL_RTX;
2993 else
2995 tree dest = CALL_EXPR_ARG (exp, 0);
2996 tree src = CALL_EXPR_ARG (exp, 1);
2997 tree len = CALL_EXPR_ARG (exp, 2);
2998 const char *src_str;
2999 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3000 unsigned int dest_align
3001 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3002 rtx dest_mem, src_mem, dest_addr, len_rtx;
3003 HOST_WIDE_INT expected_size = -1;
3004 unsigned int expected_align = 0;
3006 /* If DEST is not a pointer type, call the normal function. */
3007 if (dest_align == 0)
3008 return NULL_RTX;
3010 /* If SRC is not a pointer type, don't do this
3011 operation in-line. */
3012 if (src_align == 0)
3013 return NULL_RTX;
3015 if (currently_expanding_gimple_stmt)
3016 stringop_block_profile (currently_expanding_gimple_stmt,
3017 &expected_align, &expected_size);
3019 if (expected_align < dest_align)
3020 expected_align = dest_align;
3021 dest_mem = get_memory_rtx (dest, len);
3022 set_mem_align (dest_mem, dest_align);
3023 len_rtx = expand_normal (len);
3024 src_str = c_getstr (src);
3026 /* If SRC is a string constant and block move would be done
3027 by pieces, we can avoid loading the string from memory
3028 and only store the computed constants. */
3029 if (src_str
3030 && CONST_INT_P (len_rtx)
3031 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3032 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3033 CONST_CAST (char *, src_str),
3034 dest_align, false))
3036 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3037 builtin_memcpy_read_str,
3038 CONST_CAST (char *, src_str),
3039 dest_align, false, 0);
3040 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3041 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3042 return dest_mem;
3045 src_mem = get_memory_rtx (src, len);
3046 set_mem_align (src_mem, src_align);
3048 /* Copy word part most expediently. */
3049 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3050 CALL_EXPR_TAILCALL (exp)
3051 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3052 expected_align, expected_size);
3054 if (dest_addr == 0)
3056 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3057 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3059 return dest_addr;
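/* Illustrative only: the store-by-pieces path above means that, for a
   short constant length and a string-literal source such as

     char buf[8];
     __builtin_memcpy (buf, "hi!", 4);

   the bytes are fetched through builtin_memcpy_read_str and emitted as
   immediate stores, with no load from the string constant and no call to
   memcpy.  */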
3063 /* Expand a call EXP to the mempcpy builtin.
3064 Return NULL_RTX if we failed; the caller should emit a normal call,
3065 otherwise try to get the result in TARGET, if convenient (and in
3066 mode MODE if that's convenient). If ENDP is 0 return the
3067 destination pointer, if ENDP is 1 return the end pointer a la
3068 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3069 stpcpy. */
3071 static rtx
3072 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3074 if (!validate_arglist (exp,
3075 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3076 return NULL_RTX;
3077 else
3079 tree dest = CALL_EXPR_ARG (exp, 0);
3080 tree src = CALL_EXPR_ARG (exp, 1);
3081 tree len = CALL_EXPR_ARG (exp, 2);
3082 return expand_builtin_mempcpy_args (dest, src, len,
3083 target, mode, /*endp=*/ 1);
3087 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3088 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3089 so that this can also be called without constructing an actual CALL_EXPR.
3090 The other arguments and return value are the same as for
3091 expand_builtin_mempcpy. */
3093 static rtx
3094 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3095 rtx target, enum machine_mode mode, int endp)
3097 /* If return value is ignored, transform mempcpy into memcpy. */
3098 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3100 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3101 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3102 dest, src, len);
3103 return expand_expr (result, target, mode, EXPAND_NORMAL);
3105 else
3107 const char *src_str;
3108 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3109 unsigned int dest_align
3110 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3111 rtx dest_mem, src_mem, len_rtx;
3113 /* If either SRC or DEST is not a pointer type, don't do this
3114 operation in-line. */
3115 if (dest_align == 0 || src_align == 0)
3116 return NULL_RTX;
3118 /* If LEN is not constant, call the normal function. */
3119 if (! host_integerp (len, 1))
3120 return NULL_RTX;
3122 len_rtx = expand_normal (len);
3123 src_str = c_getstr (src);
3125 /* If SRC is a string constant and block move would be done
3126 by pieces, we can avoid loading the string from memory
3127 and only store the computed constants. */
3128 if (src_str
3129 && CONST_INT_P (len_rtx)
3130 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3131 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3132 CONST_CAST (char *, src_str),
3133 dest_align, false))
3135 dest_mem = get_memory_rtx (dest, len);
3136 set_mem_align (dest_mem, dest_align);
3137 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3138 builtin_memcpy_read_str,
3139 CONST_CAST (char *, src_str),
3140 dest_align, false, endp);
3141 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3142 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3143 return dest_mem;
3146 if (CONST_INT_P (len_rtx)
3147 && can_move_by_pieces (INTVAL (len_rtx),
3148 MIN (dest_align, src_align)))
3150 dest_mem = get_memory_rtx (dest, len);
3151 set_mem_align (dest_mem, dest_align);
3152 src_mem = get_memory_rtx (src, len);
3153 set_mem_align (src_mem, src_align);
3154 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3155 MIN (dest_align, src_align), endp);
3156 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3157 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3158 return dest_mem;
3161 return NULL_RTX;
3165 #ifndef HAVE_movstr
3166 # define HAVE_movstr 0
3167 # define CODE_FOR_movstr CODE_FOR_nothing
3168 #endif
3170 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3171 we failed; the caller should emit a normal call, otherwise try to
3172 get the result in TARGET, if convenient. If ENDP is 0 return the
3173 destination pointer, if ENDP is 1 return the end pointer a la
3174 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3175 stpcpy. */
3177 static rtx
3178 expand_movstr (tree dest, tree src, rtx target, int endp)
3180 struct expand_operand ops[3];
3181 rtx dest_mem;
3182 rtx src_mem;
3184 if (!HAVE_movstr)
3185 return NULL_RTX;
3187 dest_mem = get_memory_rtx (dest, NULL);
3188 src_mem = get_memory_rtx (src, NULL);
3189 if (!endp)
3191 target = force_reg (Pmode, XEXP (dest_mem, 0));
3192 dest_mem = replace_equiv_address (dest_mem, target);
3195 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3196 create_fixed_operand (&ops[1], dest_mem);
3197 create_fixed_operand (&ops[2], src_mem);
3198 expand_insn (CODE_FOR_movstr, 3, ops);
3200 if (endp && target != const0_rtx)
3202 target = ops[0].value;
3203 /* movstr is supposed to set end to the address of the NUL
3204 terminator. If the caller requested a mempcpy-like return value,
3205 adjust it. */
3206 if (endp == 1)
3208 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3209 emit_move_insn (target, force_operand (tem, NULL_RTX));
3212 return target;
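/* Usage sketch (illustrative, matching the callers below): strcpy-style
   expansion passes ENDP == 0, stpcpy-style expansion passes ENDP == 2, and
   a mempcpy-style result would use ENDP == 1.  For example

     expand_movstr (dst, src, target, 2);

   returns the address of the NUL terminator, as stpcpy requires.  */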
3215 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3216 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3217 try to get the result in TARGET, if convenient (and in mode MODE if that's
3218 convenient). */
3220 static rtx
3221 expand_builtin_strcpy (tree exp, rtx target)
3223 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3225 tree dest = CALL_EXPR_ARG (exp, 0);
3226 tree src = CALL_EXPR_ARG (exp, 1);
3227 return expand_builtin_strcpy_args (dest, src, target);
3229 return NULL_RTX;
3232 /* Helper function to do the actual work for expand_builtin_strcpy. The
3233 arguments to the builtin_strcpy call DEST and SRC are broken out
3234 so that this can also be called without constructing an actual CALL_EXPR.
3235 The other arguments and return value are the same as for
3236 expand_builtin_strcpy. */
3238 static rtx
3239 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3241 return expand_movstr (dest, src, target, /*endp=*/0);
3244 /* Expand a call EXP to the stpcpy builtin.
3245 Return NULL_RTX if we failed; the caller should emit a normal call,
3246 otherwise try to get the result in TARGET, if convenient (and in
3247 mode MODE if that's convenient). */
3249 static rtx
3250 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3252 tree dst, src;
3253 location_t loc = EXPR_LOCATION (exp);
3255 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3256 return NULL_RTX;
3258 dst = CALL_EXPR_ARG (exp, 0);
3259 src = CALL_EXPR_ARG (exp, 1);
3261 /* If return value is ignored, transform stpcpy into strcpy. */
3262 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3264 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3265 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3266 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 else
3270 tree len, lenp1;
3271 rtx ret;
3273 /* Ensure we get an actual string whose length can be evaluated at
3274 compile-time, not an expression containing a string. This is
3275 because the latter will potentially produce pessimized code
3276 when used to produce the return value. */
3277 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3278 return expand_movstr (dst, src, target, /*endp=*/2);
3280 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3281 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3282 target, mode, /*endp=*/2);
3284 if (ret)
3285 return ret;
3287 if (TREE_CODE (len) == INTEGER_CST)
3289 rtx len_rtx = expand_normal (len);
3291 if (CONST_INT_P (len_rtx))
3293 ret = expand_builtin_strcpy_args (dst, src, target);
3295 if (ret)
3297 if (! target)
3299 if (mode != VOIDmode)
3300 target = gen_reg_rtx (mode);
3301 else
3302 target = gen_reg_rtx (GET_MODE (ret));
3304 if (GET_MODE (target) != GET_MODE (ret))
3305 ret = gen_lowpart (GET_MODE (target), ret);
3307 ret = plus_constant (ret, INTVAL (len_rtx));
3308 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3309 gcc_assert (ret);
3311 return target;
3316 return expand_movstr (dst, src, target, /*endp=*/2);
3320 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3321 bytes from constant string DATA + OFFSET and return it as target
3322 constant. */
3324 static rtx
3325 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3326 enum machine_mode mode)
3328 const char *str = (const char *) data;
3330 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3331 return const0_rtx;
3333 return c_readstr (str + offset, mode);
3336 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3337 NULL_RTX if we failed; the caller should emit a normal call. */
3339 static rtx
3340 expand_builtin_strncpy (tree exp, rtx target)
3342 location_t loc = EXPR_LOCATION (exp);
3344 if (validate_arglist (exp,
3345 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3347 tree dest = CALL_EXPR_ARG (exp, 0);
3348 tree src = CALL_EXPR_ARG (exp, 1);
3349 tree len = CALL_EXPR_ARG (exp, 2);
3350 tree slen = c_strlen (src, 1);
3352 /* We must be passed a constant len and src parameter. */
3353 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3354 return NULL_RTX;
3356 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3358 /* We're required to pad with trailing zeros if the requested
3359 len is greater than strlen(s2)+1. In that case try to
3360 use store_by_pieces; if it fails, punt. */
3361 if (tree_int_cst_lt (slen, len))
3363 unsigned int dest_align
3364 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3365 const char *p = c_getstr (src);
3366 rtx dest_mem;
3368 if (!p || dest_align == 0 || !host_integerp (len, 1)
3369 || !can_store_by_pieces (tree_low_cst (len, 1),
3370 builtin_strncpy_read_str,
3371 CONST_CAST (char *, p),
3372 dest_align, false))
3373 return NULL_RTX;
3375 dest_mem = get_memory_rtx (dest, len);
3376 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3377 builtin_strncpy_read_str,
3378 CONST_CAST (char *, p), dest_align, false, 0);
3379 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3380 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3381 return dest_mem;
3384 return NULL_RTX;
3387 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3388 bytes from constant string DATA + OFFSET and return it as target
3389 constant. */
3391 static rtx
3392 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3393 enum machine_mode mode)
3395 const char *c = (const char *) data;
3396 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3398 memset (p, *c, GET_MODE_SIZE (mode));
3400 return c_readstr (p, mode);
3403 /* Callback routine for store_by_pieces. Return the RTL of a register
3404 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3405 char value given in the RTL register data. For example, if mode is
3406 4 bytes wide, return the RTL for 0x01010101*data. */
3408 static rtx
3409 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3410 enum machine_mode mode)
3412 rtx target, coeff;
3413 size_t size;
3414 char *p;
3416 size = GET_MODE_SIZE (mode);
3417 if (size == 1)
3418 return (rtx) data;
3420 p = XALLOCAVEC (char, size);
3421 memset (p, 1, size);
3422 coeff = c_readstr (p, mode);
3424 target = convert_to_mode (mode, (rtx) data, 1);
3425 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3426 return force_reg (mode, target);
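/* Illustrative only: for a 4-byte MODE the multiplication above turns the
   single fill byte into a whole word, e.g.

     0x01010101 * 0xab == 0xabababab

   so store_by_pieces can emit word-sized stores of the replicated value.  */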
3429 /* Expand expression EXP, which is a call to the memset builtin. Return
3430 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3431 try to get the result in TARGET, if convenient (and in mode MODE if that's
3432 convenient). */
3434 static rtx
3435 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3437 if (!validate_arglist (exp,
3438 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3439 return NULL_RTX;
3440 else
3442 tree dest = CALL_EXPR_ARG (exp, 0);
3443 tree val = CALL_EXPR_ARG (exp, 1);
3444 tree len = CALL_EXPR_ARG (exp, 2);
3445 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3449 /* Helper function to do the actual work for expand_builtin_memset. The
3450 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3451 so that this can also be called without constructing an actual CALL_EXPR.
3452 The other arguments and return value are the same as for
3453 expand_builtin_memset. */
3455 static rtx
3456 expand_builtin_memset_args (tree dest, tree val, tree len,
3457 rtx target, enum machine_mode mode, tree orig_exp)
3459 tree fndecl, fn;
3460 enum built_in_function fcode;
3461 enum machine_mode val_mode;
3462 char c;
3463 unsigned int dest_align;
3464 rtx dest_mem, dest_addr, len_rtx;
3465 HOST_WIDE_INT expected_size = -1;
3466 unsigned int expected_align = 0;
3468 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3470 /* If DEST is not a pointer type, don't do this operation in-line. */
3471 if (dest_align == 0)
3472 return NULL_RTX;
3474 if (currently_expanding_gimple_stmt)
3475 stringop_block_profile (currently_expanding_gimple_stmt,
3476 &expected_align, &expected_size);
3478 if (expected_align < dest_align)
3479 expected_align = dest_align;
3481 /* If the LEN parameter is zero, return DEST. */
3482 if (integer_zerop (len))
3484 /* Evaluate and ignore VAL in case it has side-effects. */
3485 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3486 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3489 /* Stabilize the arguments in case we fail. */
3490 dest = builtin_save_expr (dest);
3491 val = builtin_save_expr (val);
3492 len = builtin_save_expr (len);
3494 len_rtx = expand_normal (len);
3495 dest_mem = get_memory_rtx (dest, len);
3496 val_mode = TYPE_MODE (unsigned_char_type_node);
3498 if (TREE_CODE (val) != INTEGER_CST)
3500 rtx val_rtx;
3502 val_rtx = expand_normal (val);
3503 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3505 /* Assume that we can memset by pieces if we can store
3506 the coefficients by pieces (in the required modes).
3507 We can't pass builtin_memset_gen_str as that emits RTL. */
3508 c = 1;
3509 if (host_integerp (len, 1)
3510 && can_store_by_pieces (tree_low_cst (len, 1),
3511 builtin_memset_read_str, &c, dest_align,
3512 true))
3514 val_rtx = force_reg (val_mode, val_rtx);
3515 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3516 builtin_memset_gen_str, val_rtx, dest_align,
3517 true, 0);
3519 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3520 dest_align, expected_align,
3521 expected_size))
3522 goto do_libcall;
3524 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3525 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3526 return dest_mem;
3529 if (target_char_cast (val, &c))
3530 goto do_libcall;
3532 if (c)
3534 if (host_integerp (len, 1)
3535 && can_store_by_pieces (tree_low_cst (len, 1),
3536 builtin_memset_read_str, &c, dest_align,
3537 true))
3538 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3539 builtin_memset_read_str, &c, dest_align, true, 0);
3540 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3541 gen_int_mode (c, val_mode),
3542 dest_align, expected_align,
3543 expected_size))
3544 goto do_libcall;
3546 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3547 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3548 return dest_mem;
3551 set_mem_align (dest_mem, dest_align);
3552 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3553 CALL_EXPR_TAILCALL (orig_exp)
3554 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3555 expected_align, expected_size);
3557 if (dest_addr == 0)
3559 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3560 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3563 return dest_addr;
3565 do_libcall:
3566 fndecl = get_callee_fndecl (orig_exp);
3567 fcode = DECL_FUNCTION_CODE (fndecl);
3568 if (fcode == BUILT_IN_MEMSET)
3569 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3570 dest, val, len);
3571 else if (fcode == BUILT_IN_BZERO)
3572 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3573 dest, len);
3574 else
3575 gcc_unreachable ();
3576 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3577 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3578 return expand_call (fn, target, target == const0_rtx);
3581 /* Expand expression EXP, which is a call to the bzero builtin. Return
3582 NULL_RTX if we failed and the caller should emit a normal call. */
3584 static rtx
3585 expand_builtin_bzero (tree exp)
3587 tree dest, size;
3588 location_t loc = EXPR_LOCATION (exp);
3590 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3591 return NULL_RTX;
3593 dest = CALL_EXPR_ARG (exp, 0);
3594 size = CALL_EXPR_ARG (exp, 1);
3596 /* New argument list transforming bzero(ptr x, int y) to
3597 memset(ptr x, int 0, size_t y). This is done this way
3598 so that if it isn't expanded inline, we fall back to
3599 calling bzero instead of memset. */
3601 return expand_builtin_memset_args (dest, integer_zero_node,
3602 fold_convert_loc (loc, sizetype, size),
3603 const0_rtx, VOIDmode, exp);
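/* Roughly speaking, the transformation above corresponds at the source
   level to

     bzero (p, n);   =>   memset (p, 0, (size_t) n);

   except that, when the inline expansion fails, the emitted library
   call is still to bzero rather than to memset.  */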
3606 /* Expand expression EXP, which is a call to the memcmp built-in function.
3607 Return NULL_RTX if we failed and the
3608 caller should emit a normal call, otherwise try to get the result in
3609 TARGET, if convenient (and in mode MODE, if that's convenient). */
3611 static rtx
3612 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3613 ATTRIBUTE_UNUSED enum machine_mode mode)
3615 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3621 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3623 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3624 rtx result;
3625 rtx insn;
3626 tree arg1 = CALL_EXPR_ARG (exp, 0);
3627 tree arg2 = CALL_EXPR_ARG (exp, 1);
3628 tree len = CALL_EXPR_ARG (exp, 2);
3630 unsigned int arg1_align
3631 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3632 unsigned int arg2_align
3633 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3634 enum machine_mode insn_mode;
3636 #ifdef HAVE_cmpmemsi
3637 if (HAVE_cmpmemsi)
3638 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3639 else
3640 #endif
3641 #ifdef HAVE_cmpstrnsi
3642 if (HAVE_cmpstrnsi)
3643 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3644 else
3645 #endif
3646 return NULL_RTX;
3648 /* If we don't have POINTER_TYPE, call the function. */
3649 if (arg1_align == 0 || arg2_align == 0)
3650 return NULL_RTX;
3652 /* Make a place to write the result of the instruction. */
3653 result = target;
3654 if (! (result != 0
3655 && REG_P (result) && GET_MODE (result) == insn_mode
3656 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3657 result = gen_reg_rtx (insn_mode);
3659 arg1_rtx = get_memory_rtx (arg1, len);
3660 arg2_rtx = get_memory_rtx (arg2, len);
3661 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3663 /* Set MEM_SIZE as appropriate. */
3664 if (CONST_INT_P (arg3_rtx))
3666 set_mem_size (arg1_rtx, arg3_rtx);
3667 set_mem_size (arg2_rtx, arg3_rtx);
3670 #ifdef HAVE_cmpmemsi
3671 if (HAVE_cmpmemsi)
3672 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3673 GEN_INT (MIN (arg1_align, arg2_align)));
3674 else
3675 #endif
3676 #ifdef HAVE_cmpstrnsi
3677 if (HAVE_cmpstrnsi)
3678 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3679 GEN_INT (MIN (arg1_align, arg2_align)));
3680 else
3681 #endif
3682 gcc_unreachable ();
3684 if (insn)
3685 emit_insn (insn);
3686 else
3687 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3688 TYPE_MODE (integer_type_node), 3,
3689 XEXP (arg1_rtx, 0), Pmode,
3690 XEXP (arg2_rtx, 0), Pmode,
3691 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3692 TYPE_UNSIGNED (sizetype)),
3693 TYPE_MODE (sizetype));
3695 /* Return the value in the proper mode for this function. */
3696 mode = TYPE_MODE (TREE_TYPE (exp));
3697 if (GET_MODE (result) == mode)
3698 return result;
3699 else if (target != 0)
3701 convert_move (target, result, 0);
3702 return target;
3704 else
3705 return convert_to_mode (mode, result, 0);
3707 #endif
3709 return NULL_RTX;
3712 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3713 if we failed and the caller should emit a normal call, otherwise try to get
3714 the result in TARGET, if convenient. */
3716 static rtx
3717 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3719 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3720 return NULL_RTX;
3722 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3723 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3724 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3726 rtx arg1_rtx, arg2_rtx;
3727 rtx result, insn = NULL_RTX;
3728 tree fndecl, fn;
3729 tree arg1 = CALL_EXPR_ARG (exp, 0);
3730 tree arg2 = CALL_EXPR_ARG (exp, 1);
3732 unsigned int arg1_align
3733 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3734 unsigned int arg2_align
3735 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3737 /* If we don't have POINTER_TYPE, call the function. */
3738 if (arg1_align == 0 || arg2_align == 0)
3739 return NULL_RTX;
3741 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3742 arg1 = builtin_save_expr (arg1);
3743 arg2 = builtin_save_expr (arg2);
3745 arg1_rtx = get_memory_rtx (arg1, NULL);
3746 arg2_rtx = get_memory_rtx (arg2, NULL);
3748 #ifdef HAVE_cmpstrsi
3749 /* Try to call cmpstrsi. */
3750 if (HAVE_cmpstrsi)
3752 enum machine_mode insn_mode
3753 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3755 /* Make a place to write the result of the instruction. */
3756 result = target;
3757 if (! (result != 0
3758 && REG_P (result) && GET_MODE (result) == insn_mode
3759 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3760 result = gen_reg_rtx (insn_mode);
3762 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3763 GEN_INT (MIN (arg1_align, arg2_align)));
3765 #endif
3766 #ifdef HAVE_cmpstrnsi
3767 /* Try to determine at least one length and call cmpstrnsi. */
3768 if (!insn && HAVE_cmpstrnsi)
3770 tree len;
3771 rtx arg3_rtx;
3773 enum machine_mode insn_mode
3774 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3775 tree len1 = c_strlen (arg1, 1);
3776 tree len2 = c_strlen (arg2, 1);
3778 if (len1)
3779 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3780 if (len2)
3781 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3783 /* If we don't have a constant length for the first, use the length
3784 of the second, if we know it. We don't require a constant for
3785 this case; some cost analysis could be done if both are available
3786 but neither is constant. For now, assume they're equally cheap,
3787 unless one has side effects. If both strings have constant lengths,
3788 use the smaller. */
3790 if (!len1)
3791 len = len2;
3792 else if (!len2)
3793 len = len1;
3794 else if (TREE_SIDE_EFFECTS (len1))
3795 len = len2;
3796 else if (TREE_SIDE_EFFECTS (len2))
3797 len = len1;
3798 else if (TREE_CODE (len1) != INTEGER_CST)
3799 len = len2;
3800 else if (TREE_CODE (len2) != INTEGER_CST)
3801 len = len1;
3802 else if (tree_int_cst_lt (len1, len2))
3803 len = len1;
3804 else
3805 len = len2;
3807 /* If both arguments have side effects, we cannot optimize. */
3808 if (!len || TREE_SIDE_EFFECTS (len))
3809 goto do_libcall;
3811 arg3_rtx = expand_normal (len);
3813 /* Make a place to write the result of the instruction. */
3814 result = target;
3815 if (! (result != 0
3816 && REG_P (result) && GET_MODE (result) == insn_mode
3817 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3818 result = gen_reg_rtx (insn_mode);
3820 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3821 GEN_INT (MIN (arg1_align, arg2_align)));
3823 #endif
3825 if (insn)
3827 enum machine_mode mode;
3828 emit_insn (insn);
3830 /* Return the value in the proper mode for this function. */
3831 mode = TYPE_MODE (TREE_TYPE (exp));
3832 if (GET_MODE (result) == mode)
3833 return result;
3834 if (target == 0)
3835 return convert_to_mode (mode, result, 0);
3836 convert_move (target, result, 0);
3837 return target;
3840 /* Expand the library call ourselves using a stabilized argument
3841 list to avoid re-evaluating the function's arguments twice. */
3842 #ifdef HAVE_cmpstrnsi
3843 do_libcall:
3844 #endif
3845 fndecl = get_callee_fndecl (exp);
3846 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3847 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3848 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3849 return expand_call (fn, target, target == const0_rtx);
3851 #endif
3852 return NULL_RTX;
3855 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3856 NULL_RTX if we failed and the caller should emit a normal call, otherwise try to get
3857 the result in TARGET, if convenient. */
3859 static rtx
3860 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3861 ATTRIBUTE_UNUSED enum machine_mode mode)
3863 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3865 if (!validate_arglist (exp,
3866 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3867 return NULL_RTX;
3869 /* If c_strlen can determine an expression for one of the string
3870 lengths, and it doesn't have side effects, then emit cmpstrnsi
3871 using length MIN(strlen(string)+1, arg3). */
3872 #ifdef HAVE_cmpstrnsi
3873 if (HAVE_cmpstrnsi)
3875 tree len, len1, len2;
3876 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3877 rtx result, insn;
3878 tree fndecl, fn;
3879 tree arg1 = CALL_EXPR_ARG (exp, 0);
3880 tree arg2 = CALL_EXPR_ARG (exp, 1);
3881 tree arg3 = CALL_EXPR_ARG (exp, 2);
3883 unsigned int arg1_align
3884 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3885 unsigned int arg2_align
3886 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3887 enum machine_mode insn_mode
3888 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3890 len1 = c_strlen (arg1, 1);
3891 len2 = c_strlen (arg2, 1);
3893 if (len1)
3894 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3895 if (len2)
3896 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3898 /* If we don't have a constant length for the first, use the length
3899 of the second, if we know it. We don't require a constant for
3900 this case; some cost analysis could be done if both are available
3901 but neither is constant. For now, assume they're equally cheap,
3902 unless one has side effects. If both strings have constant lengths,
3903 use the smaller. */
3905 if (!len1)
3906 len = len2;
3907 else if (!len2)
3908 len = len1;
3909 else if (TREE_SIDE_EFFECTS (len1))
3910 len = len2;
3911 else if (TREE_SIDE_EFFECTS (len2))
3912 len = len1;
3913 else if (TREE_CODE (len1) != INTEGER_CST)
3914 len = len2;
3915 else if (TREE_CODE (len2) != INTEGER_CST)
3916 len = len1;
3917 else if (tree_int_cst_lt (len1, len2))
3918 len = len1;
3919 else
3920 len = len2;
3922 /* If both arguments have side effects, we cannot optimize. */
3923 if (!len || TREE_SIDE_EFFECTS (len))
3924 return NULL_RTX;
3926 /* The actual new length parameter is MIN(len,arg3). */
3927 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3928 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3930 /* If we don't have POINTER_TYPE, call the function. */
3931 if (arg1_align == 0 || arg2_align == 0)
3932 return NULL_RTX;
3934 /* Make a place to write the result of the instruction. */
3935 result = target;
3936 if (! (result != 0
3937 && REG_P (result) && GET_MODE (result) == insn_mode
3938 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3939 result = gen_reg_rtx (insn_mode);
3941 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3942 arg1 = builtin_save_expr (arg1);
3943 arg2 = builtin_save_expr (arg2);
3944 len = builtin_save_expr (len);
3946 arg1_rtx = get_memory_rtx (arg1, len);
3947 arg2_rtx = get_memory_rtx (arg2, len);
3948 arg3_rtx = expand_normal (len);
3949 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3950 GEN_INT (MIN (arg1_align, arg2_align)));
3951 if (insn)
3953 emit_insn (insn);
3955 /* Return the value in the proper mode for this function. */
3956 mode = TYPE_MODE (TREE_TYPE (exp));
3957 if (GET_MODE (result) == mode)
3958 return result;
3959 if (target == 0)
3960 return convert_to_mode (mode, result, 0);
3961 convert_move (target, result, 0);
3962 return target;
3965 /* Expand the library call ourselves using a stabilized argument
3966 list to avoid re-evaluating the function's arguments twice. */
3967 fndecl = get_callee_fndecl (exp);
3968 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3969 arg1, arg2, len);
3970 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3971 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3972 return expand_call (fn, target, target == const0_rtx);
3974 #endif
3975 return NULL_RTX;
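/* Illustrative example: for a call such as strncmp (s, "abc", 100) the
   constant string gives len2 == 4 (strlen + 1), so the length handed to
   cmpstrnsi becomes MIN (4, 100) == 4; comparing up to and including
   the terminating NUL is enough to determine the result.  */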
3978 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3979 if that's convenient. */
3982 expand_builtin_saveregs (void)
3984 rtx val, seq;
3986 /* Don't do __builtin_saveregs more than once in a function.
3987 Save the result of the first call and reuse it. */
3988 if (saveregs_value != 0)
3989 return saveregs_value;
3991 /* When this function is called, it means that registers must be
3992 saved on entry to this function. So we migrate the call to the
3993 first insn of this function. */
3995 start_sequence ();
3997 /* Do whatever the machine needs done in this case. */
3998 val = targetm.calls.expand_builtin_saveregs ();
4000 seq = get_insns ();
4001 end_sequence ();
4003 saveregs_value = val;
4005 /* Put the insns after the NOTE that starts the function. If this
4006 is inside a start_sequence, make the outer-level insn chain current, so
4007 the code is placed at the start of the function. */
4008 push_topmost_sequence ();
4009 emit_insn_after (seq, entry_of_function ());
4010 pop_topmost_sequence ();
4012 return val;
4015 /* Expand a call to __builtin_next_arg. */
4017 static rtx
4018 expand_builtin_next_arg (void)
4020 /* Checking arguments is already done in fold_builtin_next_arg
4021 that must be called before this function. */
4022 return expand_binop (ptr_mode, add_optab,
4023 crtl->args.internal_arg_pointer,
4024 crtl->args.arg_offset_rtx,
4025 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4028 /* Make it easier for the backends by protecting the valist argument
4029 from multiple evaluations. */
4031 static tree
4032 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4034 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4036 /* The current way of determining the type of valist is completely
4037 bogus. We should have the information on the va builtin instead. */
4038 if (!vatype)
4039 vatype = targetm.fn_abi_va_list (cfun->decl);
4041 if (TREE_CODE (vatype) == ARRAY_TYPE)
4043 if (TREE_SIDE_EFFECTS (valist))
4044 valist = save_expr (valist);
4046 /* For this case, the backends will be expecting a pointer to
4047 vatype, but it's possible we've actually been given an array
4048 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4049 So fix it. */
4050 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4052 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4053 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4056 else
4058 tree pt = build_pointer_type (vatype);
4060 if (! needs_lvalue)
4062 if (! TREE_SIDE_EFFECTS (valist))
4063 return valist;
4065 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4066 TREE_SIDE_EFFECTS (valist) = 1;
4069 if (TREE_SIDE_EFFECTS (valist))
4070 valist = save_expr (valist);
4071 valist = fold_build2_loc (loc, MEM_REF,
4072 vatype, valist, build_int_cst (pt, 0));
4075 return valist;
4078 /* The "standard" definition of va_list is void*. */
4080 tree
4081 std_build_builtin_va_list (void)
4083 return ptr_type_node;
4086 /* The "standard" abi va_list is va_list_type_node. */
4088 tree
4089 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4091 return va_list_type_node;
4094 /* The "standard" type of va_list is va_list_type_node. */
4096 tree
4097 std_canonical_va_list_type (tree type)
4099 tree wtype, htype;
4101 if (INDIRECT_REF_P (type))
4102 type = TREE_TYPE (type);
4103 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4104 type = TREE_TYPE (type);
4105 wtype = va_list_type_node;
4106 htype = type;
4107 /* Treat structure va_list types. */
4108 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4109 htype = TREE_TYPE (htype);
4110 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4112 /* If va_list is an array type, the argument may have decayed
4113 to a pointer type, e.g. by being passed to another function.
4114 In that case, unwrap both types so that we can compare the
4115 underlying records. */
4116 if (TREE_CODE (htype) == ARRAY_TYPE
4117 || POINTER_TYPE_P (htype))
4119 wtype = TREE_TYPE (wtype);
4120 htype = TREE_TYPE (htype);
4123 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4124 return va_list_type_node;
4126 return NULL_TREE;
4129 /* The "standard" implementation of va_start: just assign `nextarg' to
4130 the variable. */
4132 void
4133 std_expand_builtin_va_start (tree valist, rtx nextarg)
4135 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4136 convert_move (va_r, nextarg, 0);
4139 /* Expand EXP, a call to __builtin_va_start. */
4141 static rtx
4142 expand_builtin_va_start (tree exp)
4144 rtx nextarg;
4145 tree valist;
4146 location_t loc = EXPR_LOCATION (exp);
4148 if (call_expr_nargs (exp) < 2)
4150 error_at (loc, "too few arguments to function %<va_start%>");
4151 return const0_rtx;
4154 if (fold_builtin_next_arg (exp, true))
4155 return const0_rtx;
4157 nextarg = expand_builtin_next_arg ();
4158 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4160 if (targetm.expand_builtin_va_start)
4161 targetm.expand_builtin_va_start (valist, nextarg);
4162 else
4163 std_expand_builtin_va_start (valist, nextarg);
4165 return const0_rtx;
4168 /* The "standard" implementation of va_arg: read the value from the
4169 current (padded) address and increment by the (padded) size. */
4171 tree
4172 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4173 gimple_seq *post_p)
4175 tree addr, t, type_size, rounded_size, valist_tmp;
4176 unsigned HOST_WIDE_INT align, boundary;
4177 bool indirect;
4179 #ifdef ARGS_GROW_DOWNWARD
4180 /* All of the alignment and movement below is for args-grow-up machines.
4181 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4182 implement their own specialized gimplify_va_arg_expr routines. */
4183 gcc_unreachable ();
4184 #endif
4186 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4187 if (indirect)
4188 type = build_pointer_type (type);
4190 align = PARM_BOUNDARY / BITS_PER_UNIT;
4191 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4193 /* When we align a parameter on the stack for the caller, if the
4194 parameter's alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it
4195 will be aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  Match the
4196 callee here with the caller. */
4197 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4198 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4200 boundary /= BITS_PER_UNIT;
4202 /* Hoist the valist value into a temporary for the moment. */
4203 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4205 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4206 requires greater alignment, we must perform dynamic alignment. */
4207 if (boundary > align
4208 && !integer_zerop (TYPE_SIZE (type)))
4210 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4211 fold_build2 (POINTER_PLUS_EXPR,
4212 TREE_TYPE (valist),
4213 valist_tmp, size_int (boundary - 1)));
4214 gimplify_and_add (t, pre_p);
4216 t = fold_convert (sizetype, valist_tmp);
4217 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4218 fold_convert (TREE_TYPE (valist),
4219 fold_build2 (BIT_AND_EXPR, sizetype, t,
4220 size_int (-boundary))));
4221 gimplify_and_add (t, pre_p);
4223 else
4224 boundary = align;
4226 /* If the actual alignment is less than the alignment of the type,
4227 adjust the type accordingly so that we don't assume strict alignment
4228 when dereferencing the pointer. */
4229 boundary *= BITS_PER_UNIT;
4230 if (boundary < TYPE_ALIGN (type))
4232 type = build_variant_type_copy (type);
4233 TYPE_ALIGN (type) = boundary;
4236 /* Compute the rounded size of the type. */
4237 type_size = size_in_bytes (type);
4238 rounded_size = round_up (type_size, align);
4240 /* Reduce rounded_size so it's sharable with the postqueue. */
4241 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4243 /* Get AP. */
4244 addr = valist_tmp;
4245 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4247 /* Small args are padded downward. */
4248 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4249 rounded_size, size_int (align));
4250 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4251 size_binop (MINUS_EXPR, rounded_size, type_size));
4252 addr = fold_build2 (POINTER_PLUS_EXPR,
4253 TREE_TYPE (addr), addr, t);
4256 /* Compute new value for AP. */
4257 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4258 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4259 gimplify_and_add (t, pre_p);
4261 addr = fold_convert (build_pointer_type (type), addr);
4263 if (indirect)
4264 addr = build_va_arg_indirect_ref (addr);
4266 return build_va_arg_indirect_ref (addr);
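/* Worked example (assuming PAD_VARARGS_DOWN, a 32-bit PARM_BOUNDARY and
   a 1-byte argument type): align == 4, type_size == 1 and
   rounded_size == 4, so the value is read at AP plus the padding of
   rounded_size - type_size == 3 bytes, while AP itself still advances
   by the full rounded_size of 4.  */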
4269 /* Build an indirect-ref expression over the given TREE, which represents a
4270 piece of a va_arg() expansion. */
4271 tree
4272 build_va_arg_indirect_ref (tree addr)
4274 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4276 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4277 mf_mark (addr);
4279 return addr;
4282 /* Return a dummy expression of type TYPE in order to keep going after an
4283 error. */
4285 static tree
4286 dummy_object (tree type)
4288 tree t = build_int_cst (build_pointer_type (type), 0);
4289 return build2 (MEM_REF, type, t, t);
4292 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4293 builtin function, but a very special sort of operator. */
4295 enum gimplify_status
4296 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4298 tree promoted_type, have_va_type;
4299 tree valist = TREE_OPERAND (*expr_p, 0);
4300 tree type = TREE_TYPE (*expr_p);
4301 tree t;
4302 location_t loc = EXPR_LOCATION (*expr_p);
4304 /* Verify that valist is of the proper type. */
4305 have_va_type = TREE_TYPE (valist);
4306 if (have_va_type == error_mark_node)
4307 return GS_ERROR;
4308 have_va_type = targetm.canonical_va_list_type (have_va_type);
4310 if (have_va_type == NULL_TREE)
4312 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4313 return GS_ERROR;
4316 /* Generate a diagnostic for requesting data of a type that cannot
4317 be passed through `...' due to type promotion at the call site. */
4318 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4319 != type)
4321 static bool gave_help;
4322 bool warned;
4324 /* Unfortunately, this is merely undefined, rather than a constraint
4325 violation, so we cannot make this an error. If this call is never
4326 executed, the program is still strictly conforming. */
4327 warned = warning_at (loc, 0,
4328 "%qT is promoted to %qT when passed through %<...%>",
4329 type, promoted_type);
4330 if (!gave_help && warned)
4332 gave_help = true;
4333 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4334 promoted_type, type);
4337 /* We can, however, treat "undefined" any way we please.
4338 Call abort to encourage the user to fix the program. */
4339 if (warned)
4340 inform (loc, "if this code is reached, the program will abort");
4341 /* Before the abort, allow the evaluation of the va_list
4342 expression to exit or longjmp. */
4343 gimplify_and_add (valist, pre_p);
4344 t = build_call_expr_loc (loc,
4345 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4346 gimplify_and_add (t, pre_p);
4348 /* This is dead code, but go ahead and finish so that the
4349 mode of the result comes out right. */
4350 *expr_p = dummy_object (type);
4351 return GS_ALL_DONE;
4353 else
4355 /* Make it easier for the backends by protecting the valist argument
4356 from multiple evaluations. */
4357 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4359 /* For this case, the backends will be expecting a pointer to
4360 TREE_TYPE (abi), but it's possible we've
4361 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4362 So fix it. */
4363 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4365 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4366 valist = fold_convert_loc (loc, p1,
4367 build_fold_addr_expr_loc (loc, valist));
4370 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4372 else
4373 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4375 if (!targetm.gimplify_va_arg_expr)
4376 /* FIXME: Once most targets are converted we should merely
4377 assert this is non-null. */
4378 return GS_ALL_DONE;
4380 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4381 return GS_OK;
4385 /* Expand EXP, a call to __builtin_va_end. */
4387 static rtx
4388 expand_builtin_va_end (tree exp)
4390 tree valist = CALL_EXPR_ARG (exp, 0);
4392 /* Evaluate for side effects, if needed. I hate macros that don't
4393 do that. */
4394 if (TREE_SIDE_EFFECTS (valist))
4395 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4397 return const0_rtx;
4400 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4401 builtin rather than just as an assignment in stdarg.h because of the
4402 nastiness of array-type va_list types. */
4404 static rtx
4405 expand_builtin_va_copy (tree exp)
4407 tree dst, src, t;
4408 location_t loc = EXPR_LOCATION (exp);
4410 dst = CALL_EXPR_ARG (exp, 0);
4411 src = CALL_EXPR_ARG (exp, 1);
4413 dst = stabilize_va_list_loc (loc, dst, 1);
4414 src = stabilize_va_list_loc (loc, src, 0);
4416 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4418 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4420 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4421 TREE_SIDE_EFFECTS (t) = 1;
4422 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4424 else
4426 rtx dstb, srcb, size;
4428 /* Evaluate to pointers. */
4429 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4430 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4431 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4432 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4434 dstb = convert_memory_address (Pmode, dstb);
4435 srcb = convert_memory_address (Pmode, srcb);
4437 /* "Dereference" to BLKmode memories. */
4438 dstb = gen_rtx_MEM (BLKmode, dstb);
4439 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4440 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4441 srcb = gen_rtx_MEM (BLKmode, srcb);
4442 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4443 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4445 /* Copy. */
4446 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4449 return const0_rtx;
4452 /* Expand a call to one of the builtin functions __builtin_frame_address or
4453 __builtin_return_address. */
4455 static rtx
4456 expand_builtin_frame_address (tree fndecl, tree exp)
4458 /* The argument must be a nonnegative integer constant.
4459 It counts the number of frames to scan up the stack.
4460 The value is the return address saved in that frame. */
4461 if (call_expr_nargs (exp) == 0)
4462 /* Warning about missing arg was already issued. */
4463 return const0_rtx;
4464 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4466 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4467 error ("invalid argument to %<__builtin_frame_address%>");
4468 else
4469 error ("invalid argument to %<__builtin_return_address%>");
4470 return const0_rtx;
4472 else
4474 rtx tem
4475 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4476 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4478 /* Some ports cannot access arbitrary stack frames. */
4479 if (tem == NULL)
4481 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4482 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4483 else
4484 warning (0, "unsupported argument to %<__builtin_return_address%>");
4485 return const0_rtx;
4488 /* For __builtin_frame_address, return what we've got. */
4489 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4490 return tem;
4492 if (!REG_P (tem)
4493 && ! CONSTANT_P (tem))
4494 tem = copy_to_mode_reg (Pmode, tem);
4495 return tem;
4499 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4500 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4501 is the same as for allocate_dynamic_stack_space. */
4503 static rtx
4504 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4506 rtx op0;
4507 rtx result;
4509 /* Emit normal call if marked not-inlineable. */
4510 if (CALL_CANNOT_INLINE_P (exp))
4511 return NULL_RTX;
4513 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4514 return NULL_RTX;
4516 /* Compute the argument. */
4517 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4519 /* Allocate the desired space. */
4520 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4521 cannot_accumulate);
4522 result = convert_memory_address (ptr_mode, result);
4524 return result;
4527 /* Expand EXP, a call to a bswap builtin.  Place the result in TARGET if
4528 convenient; SUBTARGET may be used to compute the operand. */
4530 static rtx
4531 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4533 enum machine_mode mode;
4534 tree arg;
4535 rtx op0;
4537 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4538 return NULL_RTX;
4540 arg = CALL_EXPR_ARG (exp, 0);
4541 mode = TYPE_MODE (TREE_TYPE (arg));
4542 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4544 target = expand_unop (mode, bswap_optab, op0, target, 1);
4546 gcc_assert (target);
4548 return convert_to_mode (mode, target, 0);
4551 /* Expand a call to a unary builtin in EXP.
4552 Return NULL_RTX if a normal call should be emitted rather than expanding the
4553 function in-line. If convenient, the result should be placed in TARGET.
4554 SUBTARGET may be used as the target for computing one of EXP's operands. */
4556 static rtx
4557 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4558 rtx subtarget, optab op_optab)
4560 rtx op0;
4562 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4563 return NULL_RTX;
4565 /* Compute the argument. */
4566 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4567 (subtarget
4568 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4569 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4570 VOIDmode, EXPAND_NORMAL);
4571 /* Compute op, into TARGET if possible.
4572 Set TARGET to wherever the result comes back. */
4573 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4574 op_optab, op0, target, 1);
4575 gcc_assert (target);
4577 return convert_to_mode (target_mode, target, 0);
4580 /* Expand a call to __builtin_expect. We just return our argument
4581 as the builtin_expect semantics should have already been handled by
4582 the tree branch prediction pass. */
4584 static rtx
4585 expand_builtin_expect (tree exp, rtx target)
4587 tree arg;
4589 if (call_expr_nargs (exp) < 2)
4590 return const0_rtx;
4591 arg = CALL_EXPR_ARG (exp, 0);
4593 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4594 /* When guessing was done, the hints should be already stripped away. */
4595 gcc_assert (!flag_guess_branch_prob
4596 || optimize == 0 || seen_error ());
4597 return target;
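/* For illustration, a source-level use such as

     if (__builtin_expect (x == 0, 0))
       slow_path ();

   (slow_path being a placeholder) has already had its hint consumed by
   the tree branch prediction pass, so at this point the call simply
   expands to the value of its first argument, X == 0.  */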
4600 void
4601 expand_builtin_trap (void)
4603 #ifdef HAVE_trap
4604 if (HAVE_trap)
4605 emit_insn (gen_trap ());
4606 else
4607 #endif
4608 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4609 emit_barrier ();
4612 /* Expand a call to __builtin_unreachable. We do nothing except emit
4613 a barrier saying that control flow will not pass here.
4615 It is the responsibility of the program being compiled to ensure
4616 that control flow never reaches __builtin_unreachable. */
4617 static void
4618 expand_builtin_unreachable (void)
4620 emit_barrier ();
4623 /* Expand EXP, a call to fabs, fabsf or fabsl.
4624 Return NULL_RTX if a normal call should be emitted rather than expanding
4625 the function inline. If convenient, the result should be placed
4626 in TARGET. SUBTARGET may be used as the target for computing
4627 the operand. */
4629 static rtx
4630 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4632 enum machine_mode mode;
4633 tree arg;
4634 rtx op0;
4636 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4637 return NULL_RTX;
4639 arg = CALL_EXPR_ARG (exp, 0);
4640 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4641 mode = TYPE_MODE (TREE_TYPE (arg));
4642 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4643 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4646 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4647 Return NULL if a normal call should be emitted rather than expanding the
4648 function inline. If convenient, the result should be placed in TARGET.
4649 SUBTARGET may be used as the target for computing the operand. */
4651 static rtx
4652 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4654 rtx op0, op1;
4655 tree arg;
4657 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4660 arg = CALL_EXPR_ARG (exp, 0);
4661 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4663 arg = CALL_EXPR_ARG (exp, 1);
4664 op1 = expand_normal (arg);
4666 return expand_copysign (op0, op1, target);
4669 /* Create a new constant string literal and return a char* pointer to it.
4670 The STRING_CST value is the LEN characters at STR. */
4671 tree
4672 build_string_literal (int len, const char *str)
4674 tree t, elem, index, type;
4676 t = build_string (len, str);
4677 elem = build_type_variant (char_type_node, 1, 0);
4678 index = build_index_type (size_int (len - 1));
4679 type = build_array_type (elem, index);
4680 TREE_TYPE (t) = type;
4681 TREE_CONSTANT (t) = 1;
4682 TREE_READONLY (t) = 1;
4683 TREE_STATIC (t) = 1;
4685 type = build_pointer_type (elem);
4686 t = build1 (ADDR_EXPR, type,
4687 build4 (ARRAY_REF, elem,
4688 t, integer_zero_node, NULL_TREE, NULL_TREE));
4689 return t;
4692 /* Expand a call to __builtin___clear_cache. */
4694 static rtx
4695 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4697 #ifndef HAVE_clear_cache
4698 #ifdef CLEAR_INSN_CACHE
4699 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4700 does something. Just do the default expansion to a call to
4701 __clear_cache(). */
4702 return NULL_RTX;
4703 #else
4704 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4705 does nothing. There is no need to call it. Do nothing. */
4706 return const0_rtx;
4707 #endif /* CLEAR_INSN_CACHE */
4708 #else
4709 /* We have a "clear_cache" insn, and it will handle everything. */
4710 tree begin, end;
4711 rtx begin_rtx, end_rtx;
4713 /* We must not expand to a library call. If we did, any
4714 fallback library function in libgcc that might contain a call to
4715 __builtin___clear_cache() would recurse infinitely. */
4716 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4718 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4719 return const0_rtx;
4722 if (HAVE_clear_cache)
4724 struct expand_operand ops[2];
4726 begin = CALL_EXPR_ARG (exp, 0);
4727 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4729 end = CALL_EXPR_ARG (exp, 1);
4730 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4732 create_address_operand (&ops[0], begin_rtx);
4733 create_address_operand (&ops[1], end_rtx);
4734 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4735 return const0_rtx;
4737 return const0_rtx;
4738 #endif /* HAVE_clear_cache */
4741 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4743 static rtx
4744 round_trampoline_addr (rtx tramp)
4746 rtx temp, addend, mask;
4748 /* If we don't need too much alignment, we'll have been guaranteed
4749 proper alignment by get_trampoline_type. */
4750 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4751 return tramp;
4753 /* Round address up to desired boundary. */
4754 temp = gen_reg_rtx (Pmode);
4755 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4756 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4758 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4759 temp, 0, OPTAB_LIB_WIDEN);
4760 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4761 temp, 0, OPTAB_LIB_WIDEN);
4763 return tramp;
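/* Worked example (assuming TRAMPOLINE_ALIGNMENT of 64 bits): ADDEND is
   7 and MASK is -8, so an address such as 0x1003 is rounded up as
   (0x1003 + 7) & -8 == 0x1008.  */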
4766 static rtx
4767 expand_builtin_init_trampoline (tree exp)
4769 tree t_tramp, t_func, t_chain;
4770 rtx m_tramp, r_tramp, r_chain, tmp;
4772 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4773 POINTER_TYPE, VOID_TYPE))
4774 return NULL_RTX;
4776 t_tramp = CALL_EXPR_ARG (exp, 0);
4777 t_func = CALL_EXPR_ARG (exp, 1);
4778 t_chain = CALL_EXPR_ARG (exp, 2);
4780 r_tramp = expand_normal (t_tramp);
4781 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4782 MEM_NOTRAP_P (m_tramp) = 1;
4784 /* The TRAMP argument should be the address of a field within the
4785 local function's FRAME decl. Let's see if we can fill in the
4786 MEM_ATTRs for this memory. */
4787 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4788 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4789 true, 0);
4791 tmp = round_trampoline_addr (r_tramp);
4792 if (tmp != r_tramp)
4794 m_tramp = change_address (m_tramp, BLKmode, tmp);
4795 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4796 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
4799 /* The FUNC argument should be the address of the nested function.
4800 Extract the actual function decl to pass to the hook. */
4801 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4802 t_func = TREE_OPERAND (t_func, 0);
4803 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4805 r_chain = expand_normal (t_chain);
4807 /* Generate insns to initialize the trampoline. */
4808 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4810 trampolines_created = 1;
4812 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4813 "trampoline generated for nested function %qD", t_func);
4815 return const0_rtx;
4818 static rtx
4819 expand_builtin_adjust_trampoline (tree exp)
4821 rtx tramp;
4823 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4824 return NULL_RTX;
4826 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4827 tramp = round_trampoline_addr (tramp);
4828 if (targetm.calls.trampoline_adjust_address)
4829 tramp = targetm.calls.trampoline_adjust_address (tramp);
4831 return tramp;
4834 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4835 function. The function first checks whether the back end provides
4836 an insn to implement signbit for the respective mode. If not, it
4837 checks whether the floating point format of the value is such that
4838 the sign bit can be extracted. If that is not the case, the
4839 function returns NULL_RTX to indicate that a normal call should be
4840 emitted rather than expanding the function in-line. EXP is the
4841 expression that is a call to the builtin function; if convenient,
4842 the result should be placed in TARGET. */
4843 static rtx
4844 expand_builtin_signbit (tree exp, rtx target)
4846 const struct real_format *fmt;
4847 enum machine_mode fmode, imode, rmode;
4848 tree arg;
4849 int word, bitpos;
4850 enum insn_code icode;
4851 rtx temp;
4852 location_t loc = EXPR_LOCATION (exp);
4854 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4855 return NULL_RTX;
4857 arg = CALL_EXPR_ARG (exp, 0);
4858 fmode = TYPE_MODE (TREE_TYPE (arg));
4859 rmode = TYPE_MODE (TREE_TYPE (exp));
4860 fmt = REAL_MODE_FORMAT (fmode);
4862 arg = builtin_save_expr (arg);
4864 /* Expand the argument yielding a RTX expression. */
4865 temp = expand_normal (arg);
4867 /* Check if the back end provides an insn that handles signbit for the
4868 argument's mode. */
4869 icode = optab_handler (signbit_optab, fmode);
4870 if (icode != CODE_FOR_nothing)
4872 rtx last = get_last_insn ();
4873 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4874 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4875 return target;
4876 delete_insns_since (last);
4879 /* For floating point formats without a sign bit, implement signbit
4880 as "ARG < 0.0". */
4881 bitpos = fmt->signbit_ro;
4882 if (bitpos < 0)
4884 /* But we can't do this if the format supports signed zero. */
4885 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4886 return NULL_RTX;
4888 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4889 build_real (TREE_TYPE (arg), dconst0));
4890 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4893 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4895 imode = int_mode_for_mode (fmode);
4896 if (imode == BLKmode)
4897 return NULL_RTX;
4898 temp = gen_lowpart (imode, temp);
4900 else
4902 imode = word_mode;
4903 /* Handle targets with different FP word orders. */
4904 if (FLOAT_WORDS_BIG_ENDIAN)
4905 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4906 else
4907 word = bitpos / BITS_PER_WORD;
4908 temp = operand_subword_force (temp, word, fmode);
4909 bitpos = bitpos % BITS_PER_WORD;
4912 /* Force the intermediate word_mode (or narrower) result into a
4913 register. This avoids attempting to create paradoxical SUBREGs
4914 of floating point modes below. */
4915 temp = force_reg (imode, temp);
4917 /* If the bitpos is within the "result mode" lowpart, the operation
4918 can be implemented with a single bitwise AND.  Otherwise, we need
4919 a right shift and an AND. */
4921 if (bitpos < GET_MODE_BITSIZE (rmode))
4923 double_int mask = double_int_setbit (double_int_zero, bitpos);
4925 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4926 temp = gen_lowpart (rmode, temp);
4927 temp = expand_binop (rmode, and_optab, temp,
4928 immed_double_int_const (mask, rmode),
4929 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4931 else
4933 /* Perform a logical right shift to place the signbit in the least
4934 significant bit, then truncate the result to the desired mode
4935 and mask just this bit. */
4936 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4937 temp = gen_lowpart (rmode, temp);
4938 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4939 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4942 return temp;
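/* Worked example (assuming IEEE single precision and a 32-bit int
   result): the sign occupies bit 31, which lies inside the result mode,
   so a single AND with 0x80000000 suffices.  For double on a target
   with 64-bit words the sign is bit 63, outside the 32-bit result, so
   the value is shifted right by 63 and masked with 1.  */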
4945 /* Expand fork or exec calls. TARGET is the desired target of the
4946 call. EXP is the call. FN is the
4947 identifier of the actual function.  IGNORE is nonzero if the
4948 value is to be ignored. */
4950 static rtx
4951 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4953 tree id, decl;
4954 tree call;
4956 /* If we are not profiling, just call the function. */
4957 if (!profile_arc_flag)
4958 return NULL_RTX;
4960 /* Otherwise call the wrapper. This should be equivalent for the rest of
4961 the compiler, so the code does not diverge, and the wrapper may run the
4962 code necessary for keeping the profiling sane. */
4964 switch (DECL_FUNCTION_CODE (fn))
4966 case BUILT_IN_FORK:
4967 id = get_identifier ("__gcov_fork");
4968 break;
4970 case BUILT_IN_EXECL:
4971 id = get_identifier ("__gcov_execl");
4972 break;
4974 case BUILT_IN_EXECV:
4975 id = get_identifier ("__gcov_execv");
4976 break;
4978 case BUILT_IN_EXECLP:
4979 id = get_identifier ("__gcov_execlp");
4980 break;
4982 case BUILT_IN_EXECLE:
4983 id = get_identifier ("__gcov_execle");
4984 break;
4986 case BUILT_IN_EXECVP:
4987 id = get_identifier ("__gcov_execvp");
4988 break;
4990 case BUILT_IN_EXECVE:
4991 id = get_identifier ("__gcov_execve");
4992 break;
4994 default:
4995 gcc_unreachable ();
4998 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4999 FUNCTION_DECL, id, TREE_TYPE (fn));
5000 DECL_EXTERNAL (decl) = 1;
5001 TREE_PUBLIC (decl) = 1;
5002 DECL_ARTIFICIAL (decl) = 1;
5003 TREE_NOTHROW (decl) = 1;
5004 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5005 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5006 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5007 return expand_call (call, target, ignore);
5012 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5013 the pointer in these functions is void*, the tree optimizers may remove
5014 casts. The mode computed in expand_builtin isn't reliable either, due
5015 to __sync_bool_compare_and_swap.
5017 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5018 group of builtins. This gives us log2 of the mode size. */
5020 static inline enum machine_mode
5021 get_builtin_sync_mode (int fcode_diff)
5023 /* The size is not negotiable, so ask not to get BLKmode in return
5024 if the target indicates that a smaller size would be better. */
5025 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
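/* For example, for __sync_fetch_and_add_4 the difference from the _1
   variant is 2, so the mode requested is BITS_PER_UNIT << 2 == 32 bits
   wide.  */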
5028 /* Expand the memory expression LOC and return the appropriate memory operand
5029 for the builtin_sync operations. */
5031 static rtx
5032 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5034 rtx addr, mem;
5036 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5037 addr = convert_memory_address (Pmode, addr);
5039 /* Note that we explicitly do not want any alias information for this
5040 memory, so that we kill all other live memories. Otherwise we don't
5041 satisfy the full barrier semantics of the intrinsic. */
5042 mem = validize_mem (gen_rtx_MEM (mode, addr));
5044 /* The alignment needs to be at least that of the mode. */
5045 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5046 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5047 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5048 MEM_VOLATILE_P (mem) = 1;
5050 return mem;
5053 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5054 EXP is the CALL_EXPR. CODE is the rtx code
5055 that corresponds to the arithmetic or logical operation from the name;
5056 an exception here is that NOT actually means NAND. TARGET is an optional
5057 place for us to store the results; AFTER is true if this is the
5058 fetch_and_xxx form. IGNORE is true if we don't actually care about
5059 the result of the operation at all. */
5061 static rtx
5062 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5063 enum rtx_code code, bool after,
5064 rtx target, bool ignore)
5066 rtx val, mem;
5067 enum machine_mode old_mode;
5068 location_t loc = EXPR_LOCATION (exp);
5070 if (code == NOT && warn_sync_nand)
5072 tree fndecl = get_callee_fndecl (exp);
5073 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5075 static bool warned_f_a_n, warned_n_a_f;
5077 switch (fcode)
5079 case BUILT_IN_FETCH_AND_NAND_1:
5080 case BUILT_IN_FETCH_AND_NAND_2:
5081 case BUILT_IN_FETCH_AND_NAND_4:
5082 case BUILT_IN_FETCH_AND_NAND_8:
5083 case BUILT_IN_FETCH_AND_NAND_16:
5085 if (warned_f_a_n)
5086 break;
5088 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5089 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5090 warned_f_a_n = true;
5091 break;
5093 case BUILT_IN_NAND_AND_FETCH_1:
5094 case BUILT_IN_NAND_AND_FETCH_2:
5095 case BUILT_IN_NAND_AND_FETCH_4:
5096 case BUILT_IN_NAND_AND_FETCH_8:
5097 case BUILT_IN_NAND_AND_FETCH_16:
5099 if (warned_n_a_f)
5100 break;
5102 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5103 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5104 warned_n_a_f = true;
5105 break;
5107 default:
5108 gcc_unreachable ();
5112 /* Expand the operands. */
5113 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5115 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5116 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5117 of CONST_INTs, where we know the old_mode only from the call argument. */
5118 old_mode = GET_MODE (val);
5119 if (old_mode == VOIDmode)
5120 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5121 val = convert_modes (mode, old_mode, val, 1);
5123 if (ignore)
5124 return expand_sync_operation (mem, val, code);
5125 else
5126 return expand_sync_fetch_operation (mem, val, code, after, target);
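/* As far as we know, the GCC 4.4 semantic change warned about above is
   that __sync_fetch_and_nand now computes ~(old & val) rather than the
   earlier ~old & val.  */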
5129 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5130 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5131 true if this is the boolean form. TARGET is a place for us to store the
5132 results; this is NOT optional if IS_BOOL is true. */
5134 static rtx
5135 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5136 bool is_bool, rtx target)
5138 rtx old_val, new_val, mem;
5139 enum machine_mode old_mode;
5141 /* Expand the operands. */
5142 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5145 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5146 mode, EXPAND_NORMAL);
5147 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5148 of CONST_INTs, where we know the old_mode only from the call argument. */
5149 old_mode = GET_MODE (old_val);
5150 if (old_mode == VOIDmode)
5151 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5152 old_val = convert_modes (mode, old_mode, old_val, 1);
5154 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5155 mode, EXPAND_NORMAL);
5156 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5157 of CONST_INTs, where we know the old_mode only from the call argument. */
5158 old_mode = GET_MODE (new_val);
5159 if (old_mode == VOIDmode)
5160 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5161 new_val = convert_modes (mode, old_mode, new_val, 1);
5163 if (is_bool)
5164 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5165 else
5166 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5169 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5170 general form is actually an atomic exchange, and some targets only
5171 support a reduced form with the second argument being a constant 1.
5172 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5173 the results. */
5175 static rtx
5176 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5177 rtx target)
5179 rtx val, mem;
5180 enum machine_mode old_mode;
5182 /* Expand the operands. */
5183 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5184 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5185 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5186 of CONST_INTs, where we know the old_mode only from the call argument. */
5187 old_mode = GET_MODE (val);
5188 if (old_mode == VOIDmode)
5189 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5190 val = convert_modes (mode, old_mode, val, 1);
5192 return expand_sync_lock_test_and_set (mem, val, target);
5195 /* Expand the __sync_synchronize intrinsic. */
5197 static void
5198 expand_builtin_synchronize (void)
5200 gimple x;
5201 VEC (tree, gc) *v_clobbers;
5203 #ifdef HAVE_memory_barrier
5204 if (HAVE_memory_barrier)
5206 emit_insn (gen_memory_barrier ());
5207 return;
5209 #endif
5211 if (synchronize_libfunc != NULL_RTX)
5213 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5214 return;
5217 /* If no explicit memory barrier instruction is available, create an
5218 empty asm stmt with a memory clobber. */
5219 v_clobbers = VEC_alloc (tree, gc, 1);
5220 VEC_quick_push (tree, v_clobbers,
5221 tree_cons (NULL, build_string (6, "memory"), NULL));
5222 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5223 gimple_asm_set_volatile (x, true);
5224 expand_asm_stmt (x);
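/* The fallback above behaves much like the source-level compiler
   barrier

     __asm__ __volatile__ ("" : : : "memory");

   in that no instruction is emitted, but memory accesses may not be
   moved across it.  */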
5227 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5229 static void
5230 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5232 struct expand_operand ops[2];
5233 enum insn_code icode;
5234 rtx mem;
5236 /* Expand the operands. */
5237 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5239 /* If there is an explicit operation in the md file, use it. */
5240 icode = direct_optab_handler (sync_lock_release_optab, mode);
5241 if (icode != CODE_FOR_nothing)
5243 create_fixed_operand (&ops[0], mem);
5244 create_input_operand (&ops[1], const0_rtx, mode);
5245 if (maybe_expand_insn (icode, 2, ops))
5246 return;
5249 /* Otherwise we can implement this operation by emitting a barrier
5250 followed by a store of zero. */
5251 expand_builtin_synchronize ();
5252 emit_move_insn (mem, const0_rtx);
5255 /* Expand an expression EXP that calls a built-in function,
5256 with result going to TARGET if that's convenient
5257 (and in mode MODE if that's convenient).
5258 SUBTARGET may be used as the target for computing one of EXP's operands.
5259 IGNORE is nonzero if the value is to be ignored. */
5262 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5263 int ignore)
5265 tree fndecl = get_callee_fndecl (exp);
5266 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5267 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5268 int flags;
5270 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5271 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5273 /* When not optimizing, generate calls to library functions for a certain
5274 set of builtins. */
5275 if (!optimize
5276 && !called_as_built_in (fndecl)
5277 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5278 && fcode != BUILT_IN_ALLOCA
5279 && fcode != BUILT_IN_FREE)
5280 return expand_call (exp, target, ignore);
5282 /* The built-in function expanders test for target == const0_rtx
5283 to determine whether the function's result will be ignored. */
5284 if (ignore)
5285 target = const0_rtx;
5287 /* If the result of a pure or const built-in function is ignored, and
5288 none of its arguments are volatile, we can avoid expanding the
5289 built-in call and just evaluate the arguments for side-effects. */
5290 if (target == const0_rtx
5291 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5292 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5294 bool volatilep = false;
5295 tree arg;
5296 call_expr_arg_iterator iter;
5298 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5299 if (TREE_THIS_VOLATILE (arg))
5301 volatilep = true;
5302 break;
5305 if (! volatilep)
5307 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5308 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5309 return const0_rtx;
5313 switch (fcode)
5315 CASE_FLT_FN (BUILT_IN_FABS):
5316 target = expand_builtin_fabs (exp, target, subtarget);
5317 if (target)
5318 return target;
5319 break;
5321 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5322 target = expand_builtin_copysign (exp, target, subtarget);
5323 if (target)
5324 return target;
5325 break;
5327 /* Just do a normal library call if we were unable to fold
5328 the values. */
5329 CASE_FLT_FN (BUILT_IN_CABS):
5330 break;
5332 CASE_FLT_FN (BUILT_IN_EXP):
5333 CASE_FLT_FN (BUILT_IN_EXP10):
5334 CASE_FLT_FN (BUILT_IN_POW10):
5335 CASE_FLT_FN (BUILT_IN_EXP2):
5336 CASE_FLT_FN (BUILT_IN_EXPM1):
5337 CASE_FLT_FN (BUILT_IN_LOGB):
5338 CASE_FLT_FN (BUILT_IN_LOG):
5339 CASE_FLT_FN (BUILT_IN_LOG10):
5340 CASE_FLT_FN (BUILT_IN_LOG2):
5341 CASE_FLT_FN (BUILT_IN_LOG1P):
5342 CASE_FLT_FN (BUILT_IN_TAN):
5343 CASE_FLT_FN (BUILT_IN_ASIN):
5344 CASE_FLT_FN (BUILT_IN_ACOS):
5345 CASE_FLT_FN (BUILT_IN_ATAN):
5346 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5347 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5348 because of possible accuracy problems. */
5349 if (! flag_unsafe_math_optimizations)
5350 break;
5351 CASE_FLT_FN (BUILT_IN_SQRT):
5352 CASE_FLT_FN (BUILT_IN_FLOOR):
5353 CASE_FLT_FN (BUILT_IN_CEIL):
5354 CASE_FLT_FN (BUILT_IN_TRUNC):
5355 CASE_FLT_FN (BUILT_IN_ROUND):
5356 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5357 CASE_FLT_FN (BUILT_IN_RINT):
5358 target = expand_builtin_mathfn (exp, target, subtarget);
5359 if (target)
5360 return target;
5361 break;
5363 CASE_FLT_FN (BUILT_IN_FMA):
5364 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5365 if (target)
5366 return target;
5367 break;
5369 CASE_FLT_FN (BUILT_IN_ILOGB):
5370 if (! flag_unsafe_math_optimizations)
5371 break;
5372 CASE_FLT_FN (BUILT_IN_ISINF):
5373 CASE_FLT_FN (BUILT_IN_FINITE):
5374 case BUILT_IN_ISFINITE:
5375 case BUILT_IN_ISNORMAL:
5376 target = expand_builtin_interclass_mathfn (exp, target);
5377 if (target)
5378 return target;
5379 break;
5381 CASE_FLT_FN (BUILT_IN_LCEIL):
5382 CASE_FLT_FN (BUILT_IN_LLCEIL):
5383 CASE_FLT_FN (BUILT_IN_LFLOOR):
5384 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5385 target = expand_builtin_int_roundingfn (exp, target);
5386 if (target)
5387 return target;
5388 break;
5390 CASE_FLT_FN (BUILT_IN_LRINT):
5391 CASE_FLT_FN (BUILT_IN_LLRINT):
5392 CASE_FLT_FN (BUILT_IN_LROUND):
5393 CASE_FLT_FN (BUILT_IN_LLROUND):
5394 target = expand_builtin_int_roundingfn_2 (exp, target);
5395 if (target)
5396 return target;
5397 break;
5399 CASE_FLT_FN (BUILT_IN_POWI):
5400 target = expand_builtin_powi (exp, target);
5401 if (target)
5402 return target;
5403 break;
5405 CASE_FLT_FN (BUILT_IN_ATAN2):
5406 CASE_FLT_FN (BUILT_IN_LDEXP):
5407 CASE_FLT_FN (BUILT_IN_SCALB):
5408 CASE_FLT_FN (BUILT_IN_SCALBN):
5409 CASE_FLT_FN (BUILT_IN_SCALBLN):
5410 if (! flag_unsafe_math_optimizations)
5411 break;
5413 CASE_FLT_FN (BUILT_IN_FMOD):
5414 CASE_FLT_FN (BUILT_IN_REMAINDER):
5415 CASE_FLT_FN (BUILT_IN_DREM):
5416 CASE_FLT_FN (BUILT_IN_POW):
5417 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5418 if (target)
5419 return target;
5420 break;
5422 CASE_FLT_FN (BUILT_IN_CEXPI):
5423 target = expand_builtin_cexpi (exp, target);
5424 gcc_assert (target);
5425 return target;
5427 CASE_FLT_FN (BUILT_IN_SIN):
5428 CASE_FLT_FN (BUILT_IN_COS):
5429 if (! flag_unsafe_math_optimizations)
5430 break;
5431 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5432 if (target)
5433 return target;
5434 break;
5436 CASE_FLT_FN (BUILT_IN_SINCOS):
5437 if (! flag_unsafe_math_optimizations)
5438 break;
5439 target = expand_builtin_sincos (exp);
5440 if (target)
5441 return target;
5442 break;
5444 case BUILT_IN_APPLY_ARGS:
5445 return expand_builtin_apply_args ();
5447 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5448 FUNCTION with a copy of the parameters described by
5449 ARGUMENTS, and ARGSIZE. It returns a block of memory
5450 allocated on the stack into which is stored all the registers
5451 that might possibly be used for returning the result of a
5452 function. ARGUMENTS is the value returned by
5453 __builtin_apply_args. ARGSIZE is the number of bytes of
5454 arguments that must be copied. ??? How should this value be
5455 computed? We'll also need a safe worst case value for varargs
5456 functions. */
5457 case BUILT_IN_APPLY:
5458 if (!validate_arglist (exp, POINTER_TYPE,
5459 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5460 && !validate_arglist (exp, REFERENCE_TYPE,
5461 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5462 return const0_rtx;
5463 else
5465 rtx ops[3];
5467 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5468 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5469 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5471 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5474 /* __builtin_return (RESULT) causes the function to return the
5475 value described by RESULT. RESULT is address of the block of
5476 memory returned by __builtin_apply. */
5477 case BUILT_IN_RETURN:
5478 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5479 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5480 return const0_rtx;
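/* Illustrative sketch, not part of GCC itself: the usual way these three
   builtins are combined to forward a call.  The function names and the
   64-byte worst-case argument size are made-up assumptions; choosing a
   safe size is the caller's problem, as the comment above notes.

     double target_fn (int, double);

     double wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }
*/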
5482 case BUILT_IN_SAVEREGS:
5483 return expand_builtin_saveregs ();
5485 case BUILT_IN_VA_ARG_PACK:
5486 /* All valid uses of __builtin_va_arg_pack () are removed during
5487 inlining. */
5488 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5489 return const0_rtx;
5491 case BUILT_IN_VA_ARG_PACK_LEN:
5492 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5493 inlining. */
5494 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5495 return const0_rtx;
5497 /* Return the address of the first anonymous stack arg. */
5498 case BUILT_IN_NEXT_ARG:
5499 if (fold_builtin_next_arg (exp, false))
5500 return const0_rtx;
5501 return expand_builtin_next_arg ();
5503 case BUILT_IN_CLEAR_CACHE:
5504 target = expand_builtin___clear_cache (exp);
5505 if (target)
5506 return target;
5507 break;
5509 case BUILT_IN_CLASSIFY_TYPE:
5510 return expand_builtin_classify_type (exp);
5512 case BUILT_IN_CONSTANT_P:
5513 return const0_rtx;
5515 case BUILT_IN_FRAME_ADDRESS:
5516 case BUILT_IN_RETURN_ADDRESS:
5517 return expand_builtin_frame_address (fndecl, exp);
5519 /* Returns the address of the area where the structure is returned.
5520 0 otherwise. */
5521 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5522 if (call_expr_nargs (exp) != 0
5523 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5524 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5525 return const0_rtx;
5526 else
5527 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5529 case BUILT_IN_ALLOCA:
5530 /* If the allocation stems from the declaration of a variable-sized
5531 object, it cannot accumulate. */
5532 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5533 if (target)
5534 return target;
5535 break;
5537 case BUILT_IN_STACK_SAVE:
5538 return expand_stack_save ();
5540 case BUILT_IN_STACK_RESTORE:
5541 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5542 return const0_rtx;
5544 case BUILT_IN_BSWAP32:
5545 case BUILT_IN_BSWAP64:
5546 target = expand_builtin_bswap (exp, target, subtarget);
5548 if (target)
5549 return target;
5550 break;
5552 CASE_INT_FN (BUILT_IN_FFS):
5553 case BUILT_IN_FFSIMAX:
5554 target = expand_builtin_unop (target_mode, exp, target,
5555 subtarget, ffs_optab);
5556 if (target)
5557 return target;
5558 break;
5560 CASE_INT_FN (BUILT_IN_CLZ):
5561 case BUILT_IN_CLZIMAX:
5562 target = expand_builtin_unop (target_mode, exp, target,
5563 subtarget, clz_optab);
5564 if (target)
5565 return target;
5566 break;
5568 CASE_INT_FN (BUILT_IN_CTZ):
5569 case BUILT_IN_CTZIMAX:
5570 target = expand_builtin_unop (target_mode, exp, target,
5571 subtarget, ctz_optab);
5572 if (target)
5573 return target;
5574 break;
5576 CASE_INT_FN (BUILT_IN_POPCOUNT):
5577 case BUILT_IN_POPCOUNTIMAX:
5578 target = expand_builtin_unop (target_mode, exp, target,
5579 subtarget, popcount_optab);
5580 if (target)
5581 return target;
5582 break;
5584 CASE_INT_FN (BUILT_IN_PARITY):
5585 case BUILT_IN_PARITYIMAX:
5586 target = expand_builtin_unop (target_mode, exp, target,
5587 subtarget, parity_optab);
5588 if (target)
5589 return target;
5590 break;
5592 case BUILT_IN_STRLEN:
5593 target = expand_builtin_strlen (exp, target, target_mode);
5594 if (target)
5595 return target;
5596 break;
5598 case BUILT_IN_STRCPY:
5599 target = expand_builtin_strcpy (exp, target);
5600 if (target)
5601 return target;
5602 break;
5604 case BUILT_IN_STRNCPY:
5605 target = expand_builtin_strncpy (exp, target);
5606 if (target)
5607 return target;
5608 break;
5610 case BUILT_IN_STPCPY:
5611 target = expand_builtin_stpcpy (exp, target, mode);
5612 if (target)
5613 return target;
5614 break;
5616 case BUILT_IN_MEMCPY:
5617 target = expand_builtin_memcpy (exp, target);
5618 if (target)
5619 return target;
5620 break;
5622 case BUILT_IN_MEMPCPY:
5623 target = expand_builtin_mempcpy (exp, target, mode);
5624 if (target)
5625 return target;
5626 break;
5628 case BUILT_IN_MEMSET:
5629 target = expand_builtin_memset (exp, target, mode);
5630 if (target)
5631 return target;
5632 break;
5634 case BUILT_IN_BZERO:
5635 target = expand_builtin_bzero (exp);
5636 if (target)
5637 return target;
5638 break;
5640 case BUILT_IN_STRCMP:
5641 target = expand_builtin_strcmp (exp, target);
5642 if (target)
5643 return target;
5644 break;
5646 case BUILT_IN_STRNCMP:
5647 target = expand_builtin_strncmp (exp, target, mode);
5648 if (target)
5649 return target;
5650 break;
5652 case BUILT_IN_BCMP:
5653 case BUILT_IN_MEMCMP:
5654 target = expand_builtin_memcmp (exp, target, mode);
5655 if (target)
5656 return target;
5657 break;
5659 case BUILT_IN_SETJMP:
5660 /* This should have been lowered to the builtins below. */
5661 gcc_unreachable ();
5663 case BUILT_IN_SETJMP_SETUP:
5664 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5665 and the receiver label. */
5666 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5668 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5669 VOIDmode, EXPAND_NORMAL);
5670 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5671 rtx label_r = label_rtx (label);
5673 /* This is copied from the handling of non-local gotos. */
5674 expand_builtin_setjmp_setup (buf_addr, label_r);
5675 nonlocal_goto_handler_labels
5676 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5677 nonlocal_goto_handler_labels);
5678 /* ??? Do not let expand_label treat us as such since we would
5679 not want to be both on the list of non-local labels and on
5680 the list of forced labels. */
5681 FORCED_LABEL (label) = 0;
5682 return const0_rtx;
5684 break;
5686 case BUILT_IN_SETJMP_DISPATCHER:
5687 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5688 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5690 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5691 rtx label_r = label_rtx (label);
5693 /* Remove the dispatcher label from the list of non-local labels
5694 since the receiver labels have been added to it above. */
5695 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5696 return const0_rtx;
5698 break;
5700 case BUILT_IN_SETJMP_RECEIVER:
5701 /* __builtin_setjmp_receiver is passed the receiver label. */
5702 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5704 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5705 rtx label_r = label_rtx (label);
5707 expand_builtin_setjmp_receiver (label_r);
5708 return const0_rtx;
5710 break;
5712 /* __builtin_longjmp is passed a pointer to an array of five words.
5713 It's similar to the C library longjmp function but works with
5714 __builtin_setjmp above. */
5715 case BUILT_IN_LONGJMP:
5716 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5718 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5719 VOIDmode, EXPAND_NORMAL);
5720 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5722 if (value != const1_rtx)
5724 error ("%<__builtin_longjmp%> second argument must be 1");
5725 return const0_rtx;
5728 expand_builtin_longjmp (buf_addr, value);
5729 return const0_rtx;
5731 break;
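/* Illustrative sketch, not part of GCC itself: the source-level contract of
   the low-level setjmp/longjmp builtins handled above.  The buffer is an
   array of five words, and the second argument of __builtin_longjmp must be
   the literal 1 (anything else is rejected with the error above):

     static void *jmpbuf[5];

     void thrower (void)
     {
       __builtin_longjmp (jmpbuf, 1);
     }

     int catcher (void)
     {
       if (__builtin_setjmp (jmpbuf))
         return 1;                      // reached via __builtin_longjmp
       thrower ();
       return 0;
     }
*/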
5733 case BUILT_IN_NONLOCAL_GOTO:
5734 target = expand_builtin_nonlocal_goto (exp);
5735 if (target)
5736 return target;
5737 break;
5739 /* This updates the setjmp buffer that is its argument with the value
5740 of the current stack pointer. */
5741 case BUILT_IN_UPDATE_SETJMP_BUF:
5742 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5744 rtx buf_addr
5745 = expand_normal (CALL_EXPR_ARG (exp, 0));
5747 expand_builtin_update_setjmp_buf (buf_addr);
5748 return const0_rtx;
5750 break;
5752 case BUILT_IN_TRAP:
5753 expand_builtin_trap ();
5754 return const0_rtx;
5756 case BUILT_IN_UNREACHABLE:
5757 expand_builtin_unreachable ();
5758 return const0_rtx;
5760 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5761 case BUILT_IN_SIGNBITD32:
5762 case BUILT_IN_SIGNBITD64:
5763 case BUILT_IN_SIGNBITD128:
5764 target = expand_builtin_signbit (exp, target);
5765 if (target)
5766 return target;
5767 break;
5769 /* Various hooks for the DWARF 2 __throw routine. */
5770 case BUILT_IN_UNWIND_INIT:
5771 expand_builtin_unwind_init ();
5772 return const0_rtx;
5773 case BUILT_IN_DWARF_CFA:
5774 return virtual_cfa_rtx;
5775 #ifdef DWARF2_UNWIND_INFO
5776 case BUILT_IN_DWARF_SP_COLUMN:
5777 return expand_builtin_dwarf_sp_column ();
5778 case BUILT_IN_INIT_DWARF_REG_SIZES:
5779 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5780 return const0_rtx;
5781 #endif
5782 case BUILT_IN_FROB_RETURN_ADDR:
5783 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5784 case BUILT_IN_EXTRACT_RETURN_ADDR:
5785 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5786 case BUILT_IN_EH_RETURN:
5787 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5788 CALL_EXPR_ARG (exp, 1));
5789 return const0_rtx;
5790 #ifdef EH_RETURN_DATA_REGNO
5791 case BUILT_IN_EH_RETURN_DATA_REGNO:
5792 return expand_builtin_eh_return_data_regno (exp);
5793 #endif
5794 case BUILT_IN_EXTEND_POINTER:
5795 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5796 case BUILT_IN_EH_POINTER:
5797 return expand_builtin_eh_pointer (exp);
5798 case BUILT_IN_EH_FILTER:
5799 return expand_builtin_eh_filter (exp);
5800 case BUILT_IN_EH_COPY_VALUES:
5801 return expand_builtin_eh_copy_values (exp);
5803 case BUILT_IN_VA_START:
5804 return expand_builtin_va_start (exp);
5805 case BUILT_IN_VA_END:
5806 return expand_builtin_va_end (exp);
5807 case BUILT_IN_VA_COPY:
5808 return expand_builtin_va_copy (exp);
5809 case BUILT_IN_EXPECT:
5810 return expand_builtin_expect (exp, target);
5811 case BUILT_IN_PREFETCH:
5812 expand_builtin_prefetch (exp);
5813 return const0_rtx;
5815 case BUILT_IN_INIT_TRAMPOLINE:
5816 return expand_builtin_init_trampoline (exp);
5817 case BUILT_IN_ADJUST_TRAMPOLINE:
5818 return expand_builtin_adjust_trampoline (exp);
5820 case BUILT_IN_FORK:
5821 case BUILT_IN_EXECL:
5822 case BUILT_IN_EXECV:
5823 case BUILT_IN_EXECLP:
5824 case BUILT_IN_EXECLE:
5825 case BUILT_IN_EXECVP:
5826 case BUILT_IN_EXECVE:
5827 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5828 if (target)
5829 return target;
5830 break;
5832 case BUILT_IN_FETCH_AND_ADD_1:
5833 case BUILT_IN_FETCH_AND_ADD_2:
5834 case BUILT_IN_FETCH_AND_ADD_4:
5835 case BUILT_IN_FETCH_AND_ADD_8:
5836 case BUILT_IN_FETCH_AND_ADD_16:
5837 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
5838 target = expand_builtin_sync_operation (mode, exp, PLUS,
5839 false, target, ignore);
5840 if (target)
5841 return target;
5842 break;
5844 case BUILT_IN_FETCH_AND_SUB_1:
5845 case BUILT_IN_FETCH_AND_SUB_2:
5846 case BUILT_IN_FETCH_AND_SUB_4:
5847 case BUILT_IN_FETCH_AND_SUB_8:
5848 case BUILT_IN_FETCH_AND_SUB_16:
5849 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
5850 target = expand_builtin_sync_operation (mode, exp, MINUS,
5851 false, target, ignore);
5852 if (target)
5853 return target;
5854 break;
5856 case BUILT_IN_FETCH_AND_OR_1:
5857 case BUILT_IN_FETCH_AND_OR_2:
5858 case BUILT_IN_FETCH_AND_OR_4:
5859 case BUILT_IN_FETCH_AND_OR_8:
5860 case BUILT_IN_FETCH_AND_OR_16:
5861 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
5862 target = expand_builtin_sync_operation (mode, exp, IOR,
5863 false, target, ignore);
5864 if (target)
5865 return target;
5866 break;
5868 case BUILT_IN_FETCH_AND_AND_1:
5869 case BUILT_IN_FETCH_AND_AND_2:
5870 case BUILT_IN_FETCH_AND_AND_4:
5871 case BUILT_IN_FETCH_AND_AND_8:
5872 case BUILT_IN_FETCH_AND_AND_16:
5873 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
5874 target = expand_builtin_sync_operation (mode, exp, AND,
5875 false, target, ignore);
5876 if (target)
5877 return target;
5878 break;
5880 case BUILT_IN_FETCH_AND_XOR_1:
5881 case BUILT_IN_FETCH_AND_XOR_2:
5882 case BUILT_IN_FETCH_AND_XOR_4:
5883 case BUILT_IN_FETCH_AND_XOR_8:
5884 case BUILT_IN_FETCH_AND_XOR_16:
5885 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
5886 target = expand_builtin_sync_operation (mode, exp, XOR,
5887 false, target, ignore);
5888 if (target)
5889 return target;
5890 break;
5892 case BUILT_IN_FETCH_AND_NAND_1:
5893 case BUILT_IN_FETCH_AND_NAND_2:
5894 case BUILT_IN_FETCH_AND_NAND_4:
5895 case BUILT_IN_FETCH_AND_NAND_8:
5896 case BUILT_IN_FETCH_AND_NAND_16:
5897 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
5898 target = expand_builtin_sync_operation (mode, exp, NOT,
5899 false, target, ignore);
5900 if (target)
5901 return target;
5902 break;
5904 case BUILT_IN_ADD_AND_FETCH_1:
5905 case BUILT_IN_ADD_AND_FETCH_2:
5906 case BUILT_IN_ADD_AND_FETCH_4:
5907 case BUILT_IN_ADD_AND_FETCH_8:
5908 case BUILT_IN_ADD_AND_FETCH_16:
5909 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
5910 target = expand_builtin_sync_operation (mode, exp, PLUS,
5911 true, target, ignore);
5912 if (target)
5913 return target;
5914 break;
5916 case BUILT_IN_SUB_AND_FETCH_1:
5917 case BUILT_IN_SUB_AND_FETCH_2:
5918 case BUILT_IN_SUB_AND_FETCH_4:
5919 case BUILT_IN_SUB_AND_FETCH_8:
5920 case BUILT_IN_SUB_AND_FETCH_16:
5921 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
5922 target = expand_builtin_sync_operation (mode, exp, MINUS,
5923 true, target, ignore);
5924 if (target)
5925 return target;
5926 break;
5928 case BUILT_IN_OR_AND_FETCH_1:
5929 case BUILT_IN_OR_AND_FETCH_2:
5930 case BUILT_IN_OR_AND_FETCH_4:
5931 case BUILT_IN_OR_AND_FETCH_8:
5932 case BUILT_IN_OR_AND_FETCH_16:
5933 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
5934 target = expand_builtin_sync_operation (mode, exp, IOR,
5935 true, target, ignore);
5936 if (target)
5937 return target;
5938 break;
5940 case BUILT_IN_AND_AND_FETCH_1:
5941 case BUILT_IN_AND_AND_FETCH_2:
5942 case BUILT_IN_AND_AND_FETCH_4:
5943 case BUILT_IN_AND_AND_FETCH_8:
5944 case BUILT_IN_AND_AND_FETCH_16:
5945 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
5946 target = expand_builtin_sync_operation (mode, exp, AND,
5947 true, target, ignore);
5948 if (target)
5949 return target;
5950 break;
5952 case BUILT_IN_XOR_AND_FETCH_1:
5953 case BUILT_IN_XOR_AND_FETCH_2:
5954 case BUILT_IN_XOR_AND_FETCH_4:
5955 case BUILT_IN_XOR_AND_FETCH_8:
5956 case BUILT_IN_XOR_AND_FETCH_16:
5957 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
5958 target = expand_builtin_sync_operation (mode, exp, XOR,
5959 true, target, ignore);
5960 if (target)
5961 return target;
5962 break;
5964 case BUILT_IN_NAND_AND_FETCH_1:
5965 case BUILT_IN_NAND_AND_FETCH_2:
5966 case BUILT_IN_NAND_AND_FETCH_4:
5967 case BUILT_IN_NAND_AND_FETCH_8:
5968 case BUILT_IN_NAND_AND_FETCH_16:
5969 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
5970 target = expand_builtin_sync_operation (mode, exp, NOT,
5971 true, target, ignore);
5972 if (target)
5973 return target;
5974 break;
5976 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
5977 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
5978 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
5979 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
5980 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
5981 if (mode == VOIDmode)
5982 mode = TYPE_MODE (boolean_type_node);
5983 if (!target || !register_operand (target, mode))
5984 target = gen_reg_rtx (mode);
5986 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
5987 target = expand_builtin_compare_and_swap (mode, exp, true, target);
5988 if (target)
5989 return target;
5990 break;
5992 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
5993 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
5994 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
5995 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
5996 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
5997 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
5998 target = expand_builtin_compare_and_swap (mode, exp, false, target);
5999 if (target)
6000 return target;
6001 break;
6003 case BUILT_IN_LOCK_TEST_AND_SET_1:
6004 case BUILT_IN_LOCK_TEST_AND_SET_2:
6005 case BUILT_IN_LOCK_TEST_AND_SET_4:
6006 case BUILT_IN_LOCK_TEST_AND_SET_8:
6007 case BUILT_IN_LOCK_TEST_AND_SET_16:
6008 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6009 target = expand_builtin_lock_test_and_set (mode, exp, target);
6010 if (target)
6011 return target;
6012 break;
6014 case BUILT_IN_LOCK_RELEASE_1:
6015 case BUILT_IN_LOCK_RELEASE_2:
6016 case BUILT_IN_LOCK_RELEASE_4:
6017 case BUILT_IN_LOCK_RELEASE_8:
6018 case BUILT_IN_LOCK_RELEASE_16:
6019 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6020 expand_builtin_lock_release (mode, exp);
6021 return const0_rtx;
6023 case BUILT_IN_SYNCHRONIZE:
6024 expand_builtin_synchronize ();
6025 return const0_rtx;
6027 case BUILT_IN_OBJECT_SIZE:
6028 return expand_builtin_object_size (exp);
6030 case BUILT_IN_MEMCPY_CHK:
6031 case BUILT_IN_MEMPCPY_CHK:
6032 case BUILT_IN_MEMMOVE_CHK:
6033 case BUILT_IN_MEMSET_CHK:
6034 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6035 if (target)
6036 return target;
6037 break;
6039 case BUILT_IN_STRCPY_CHK:
6040 case BUILT_IN_STPCPY_CHK:
6041 case BUILT_IN_STRNCPY_CHK:
6042 case BUILT_IN_STRCAT_CHK:
6043 case BUILT_IN_STRNCAT_CHK:
6044 case BUILT_IN_SNPRINTF_CHK:
6045 case BUILT_IN_VSNPRINTF_CHK:
6046 maybe_emit_chk_warning (exp, fcode);
6047 break;
6049 case BUILT_IN_SPRINTF_CHK:
6050 case BUILT_IN_VSPRINTF_CHK:
6051 maybe_emit_sprintf_chk_warning (exp, fcode);
6052 break;
6054 case BUILT_IN_FREE:
6055 maybe_emit_free_warning (exp);
6056 break;
6058 default: /* just do library call, if unknown builtin */
6059 break;
6062 /* The switch statement above can drop through to cause the function
6063 to be called normally. */
6064 return expand_call (exp, target, ignore);
6067 /* Determine whether a tree node represents a call to a built-in
6068 function. If the tree T is a call to a built-in function with
6069 the right number of arguments of the appropriate types, return
6070 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6071 Otherwise the return value is END_BUILTINS. */
6073 enum built_in_function
6074 builtin_mathfn_code (const_tree t)
6076 const_tree fndecl, arg, parmlist;
6077 const_tree argtype, parmtype;
6078 const_call_expr_arg_iterator iter;
6080 if (TREE_CODE (t) != CALL_EXPR
6081 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6082 return END_BUILTINS;
6084 fndecl = get_callee_fndecl (t);
6085 if (fndecl == NULL_TREE
6086 || TREE_CODE (fndecl) != FUNCTION_DECL
6087 || ! DECL_BUILT_IN (fndecl)
6088 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6089 return END_BUILTINS;
6091 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6092 init_const_call_expr_arg_iterator (t, &iter);
6093 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6095 /* If a function doesn't take a variable number of arguments,
6096 the last element in the list will have type `void'. */
6097 parmtype = TREE_VALUE (parmlist);
6098 if (VOID_TYPE_P (parmtype))
6100 if (more_const_call_expr_args_p (&iter))
6101 return END_BUILTINS;
6102 return DECL_FUNCTION_CODE (fndecl);
6105 if (! more_const_call_expr_args_p (&iter))
6106 return END_BUILTINS;
6108 arg = next_const_call_expr_arg (&iter);
6109 argtype = TREE_TYPE (arg);
6111 if (SCALAR_FLOAT_TYPE_P (parmtype))
6113 if (! SCALAR_FLOAT_TYPE_P (argtype))
6114 return END_BUILTINS;
6116 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6118 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6119 return END_BUILTINS;
6121 else if (POINTER_TYPE_P (parmtype))
6123 if (! POINTER_TYPE_P (argtype))
6124 return END_BUILTINS;
6126 else if (INTEGRAL_TYPE_P (parmtype))
6128 if (! INTEGRAL_TYPE_P (argtype))
6129 return END_BUILTINS;
6131 else
6132 return END_BUILTINS;
6135 /* Variable-length argument list. */
6136 return DECL_FUNCTION_CODE (fndecl);
6139 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6140 evaluate to a constant. */
6142 static tree
6143 fold_builtin_constant_p (tree arg)
6145 /* We return 1 for a numeric type that's known to be a constant
6146 value at compile-time or for an aggregate type that's a
6147 literal constant. */
6148 STRIP_NOPS (arg);
6150 /* If we know this is a constant, return the constant one. */
6151 if (CONSTANT_CLASS_P (arg)
6152 || (TREE_CODE (arg) == CONSTRUCTOR
6153 && TREE_CONSTANT (arg)))
6154 return integer_one_node;
6155 if (TREE_CODE (arg) == ADDR_EXPR)
6157 tree op = TREE_OPERAND (arg, 0);
6158 if (TREE_CODE (op) == STRING_CST
6159 || (TREE_CODE (op) == ARRAY_REF
6160 && integer_zerop (TREE_OPERAND (op, 1))
6161 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6162 return integer_one_node;
6165 /* If this expression has side effects, show we don't know it to be a
6166 constant. Likewise if it's a pointer or aggregate type since in
6167 those cases we only want literals, since those are only optimized
6168 when generating RTL, not later.
6169 And finally, if we are compiling an initializer, not code, we
6170 need to return a definite result now; there's not going to be any
6171 more optimization done. */
6172 if (TREE_SIDE_EFFECTS (arg)
6173 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6174 || POINTER_TYPE_P (TREE_TYPE (arg))
6175 || cfun == 0
6176 || folding_initializer)
6177 return integer_zero_node;
6179 return NULL_TREE;
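/* Illustrative examples of the folding above, not part of GCC itself:

     __builtin_constant_p (42)        -> 1   numeric constant
     __builtin_constant_p ("abc")     -> 1   address of a string literal
     __builtin_constant_p (some_ptr)  -> 0   pointer type: only literals count
     __builtin_constant_p (x + y)     -> left unfolded (NULL_TREE) so later
                                         optimization can decide, unless we
                                         are folding an initializer, in which
                                         case it folds to 0 right away.  */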
6182 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6183 return it as a truthvalue. */
6185 static tree
6186 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6188 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6190 fn = built_in_decls[BUILT_IN_EXPECT];
6191 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6192 ret_type = TREE_TYPE (TREE_TYPE (fn));
6193 pred_type = TREE_VALUE (arg_types);
6194 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6196 pred = fold_convert_loc (loc, pred_type, pred);
6197 expected = fold_convert_loc (loc, expected_type, expected);
6198 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6200 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6201 build_int_cst (ret_type, 0));
6204 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6205 NULL_TREE if no simplification is possible. */
6207 static tree
6208 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6210 tree inner, fndecl;
6211 enum tree_code code;
6213 /* If this is a builtin_expect within a builtin_expect keep the
6214 inner one. See through a comparison against a constant. It
6215 might have been added to create a truthvalue. */
6216 inner = arg0;
6217 if (COMPARISON_CLASS_P (inner)
6218 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6219 inner = TREE_OPERAND (inner, 0);
6221 if (TREE_CODE (inner) == CALL_EXPR
6222 && (fndecl = get_callee_fndecl (inner))
6223 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6224 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6225 return arg0;
6227 /* Distribute the expected value over short-circuiting operators.
6228 See through the cast from truthvalue_type_node to long. */
6229 inner = arg0;
6230 while (TREE_CODE (inner) == NOP_EXPR
6231 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6232 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6233 inner = TREE_OPERAND (inner, 0);
6235 code = TREE_CODE (inner);
6236 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6238 tree op0 = TREE_OPERAND (inner, 0);
6239 tree op1 = TREE_OPERAND (inner, 1);
6241 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6242 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6243 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6245 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6248 /* If the argument isn't invariant then there's nothing else we can do. */
6249 if (!TREE_CONSTANT (arg0))
6250 return NULL_TREE;
6252 /* If we expect that a comparison against the argument will fold to
6253 a constant return the constant. In practice, this means a true
6254 constant or the address of a non-weak symbol. */
6255 inner = arg0;
6256 STRIP_NOPS (inner);
6257 if (TREE_CODE (inner) == ADDR_EXPR)
6261 inner = TREE_OPERAND (inner, 0);
6263 while (TREE_CODE (inner) == COMPONENT_REF
6264 || TREE_CODE (inner) == ARRAY_REF);
6265 if ((TREE_CODE (inner) == VAR_DECL
6266 || TREE_CODE (inner) == FUNCTION_DECL)
6267 && DECL_WEAK (inner))
6268 return NULL_TREE;
6271 /* Otherwise, ARG0 already has the proper type for the return value. */
6272 return arg0;
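/* Illustrative example of the short-circuit distribution above, not part of
   GCC itself: at the source level,

     __builtin_expect (a && b, 1)

   is rewritten as

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm of the short-circuit keeps its own prediction.  */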
6275 /* Fold a call to __builtin_classify_type with argument ARG. */
6277 static tree
6278 fold_builtin_classify_type (tree arg)
6280 if (arg == 0)
6281 return build_int_cst (integer_type_node, no_type_class);
6283 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6286 /* Fold a call to __builtin_strlen with argument ARG. */
6288 static tree
6289 fold_builtin_strlen (location_t loc, tree type, tree arg)
6291 if (!validate_arg (arg, POINTER_TYPE))
6292 return NULL_TREE;
6293 else
6295 tree len = c_strlen (arg, 0);
6297 if (len)
6298 return fold_convert_loc (loc, type, len);
6300 return NULL_TREE;
6304 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6306 static tree
6307 fold_builtin_inf (location_t loc, tree type, int warn)
6309 REAL_VALUE_TYPE real;
6311 /* __builtin_inff is intended to be usable to define INFINITY on all
6312 targets. If an infinity is not available, INFINITY expands "to a
6313 positive constant of type float that overflows at translation
6314 time", footnote "In this case, using INFINITY will violate the
6315 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6316 Thus we pedwarn to ensure this constraint violation is
6317 diagnosed. */
6318 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6319 pedwarn (loc, 0, "target format does not support infinity");
6321 real_inf (&real);
6322 return build_real (type, real);
6325 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6327 static tree
6328 fold_builtin_nan (tree arg, tree type, int quiet)
6330 REAL_VALUE_TYPE real;
6331 const char *str;
6333 if (!validate_arg (arg, POINTER_TYPE))
6334 return NULL_TREE;
6335 str = c_getstr (arg);
6336 if (!str)
6337 return NULL_TREE;
6339 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6340 return NULL_TREE;
6342 return build_real (type, real);
6345 /* Return true if the floating point expression T has an integer value.
6346 We also allow +Inf, -Inf and NaN to be considered integer values. */
6348 static bool
6349 integer_valued_real_p (tree t)
6351 switch (TREE_CODE (t))
6353 case FLOAT_EXPR:
6354 return true;
6356 case ABS_EXPR:
6357 case SAVE_EXPR:
6358 return integer_valued_real_p (TREE_OPERAND (t, 0));
6360 case COMPOUND_EXPR:
6361 case MODIFY_EXPR:
6362 case BIND_EXPR:
6363 return integer_valued_real_p (TREE_OPERAND (t, 1));
6365 case PLUS_EXPR:
6366 case MINUS_EXPR:
6367 case MULT_EXPR:
6368 case MIN_EXPR:
6369 case MAX_EXPR:
6370 return integer_valued_real_p (TREE_OPERAND (t, 0))
6371 && integer_valued_real_p (TREE_OPERAND (t, 1));
6373 case COND_EXPR:
6374 return integer_valued_real_p (TREE_OPERAND (t, 1))
6375 && integer_valued_real_p (TREE_OPERAND (t, 2));
6377 case REAL_CST:
6378 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6380 case NOP_EXPR:
6382 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6383 if (TREE_CODE (type) == INTEGER_TYPE)
6384 return true;
6385 if (TREE_CODE (type) == REAL_TYPE)
6386 return integer_valued_real_p (TREE_OPERAND (t, 0));
6387 break;
6390 case CALL_EXPR:
6391 switch (builtin_mathfn_code (t))
6393 CASE_FLT_FN (BUILT_IN_CEIL):
6394 CASE_FLT_FN (BUILT_IN_FLOOR):
6395 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6396 CASE_FLT_FN (BUILT_IN_RINT):
6397 CASE_FLT_FN (BUILT_IN_ROUND):
6398 CASE_FLT_FN (BUILT_IN_TRUNC):
6399 return true;
6401 CASE_FLT_FN (BUILT_IN_FMIN):
6402 CASE_FLT_FN (BUILT_IN_FMAX):
6403 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6404 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6406 default:
6407 break;
6409 break;
6411 default:
6412 break;
6414 return false;
6417 /* FNDECL is assumed to be a builtin where truncation can be propagated
6418 across (for instance floor((double)f) == (double)floorf (f)).
6419 Do the transformation for a call with argument ARG. */
6421 static tree
6422 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6424 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6426 if (!validate_arg (arg, REAL_TYPE))
6427 return NULL_TREE;
6429 /* Integer rounding functions are idempotent. */
6430 if (fcode == builtin_mathfn_code (arg))
6431 return arg;
6433 /* If argument is already integer valued, and we don't need to worry
6434 about setting errno, there's no need to perform rounding. */
6435 if (! flag_errno_math && integer_valued_real_p (arg))
6436 return arg;
6438 if (optimize)
6440 tree arg0 = strip_float_extensions (arg);
6441 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6442 tree newtype = TREE_TYPE (arg0);
6443 tree decl;
6445 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6446 && (decl = mathfn_built_in (newtype, fcode)))
6447 return fold_convert_loc (loc, ftype,
6448 build_call_expr_loc (loc, decl, 1,
6449 fold_convert_loc (loc,
6450 newtype,
6451 arg0)));
6453 return NULL_TREE;
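/* Illustrative example of the narrowing above, not part of GCC itself:
   assuming f has type float,

     floor ((double) f)   becomes   (double) floorf (f)

   which is safe because computing these integer-rounding functions in the
   narrower type cannot change the result.  */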
6456 /* FNDECL is assumed to be a builtin which can narrow the FP type of
6457 the argument, for instance lround((double)f) -> lroundf (f).
6458 Do the transformation for a call with argument ARG. */
6460 static tree
6461 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6463 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6465 if (!validate_arg (arg, REAL_TYPE))
6466 return NULL_TREE;
6468 /* If argument is already integer valued, and we don't need to worry
6469 about setting errno, there's no need to perform rounding. */
6470 if (! flag_errno_math && integer_valued_real_p (arg))
6471 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6472 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6474 if (optimize)
6476 tree ftype = TREE_TYPE (arg);
6477 tree arg0 = strip_float_extensions (arg);
6478 tree newtype = TREE_TYPE (arg0);
6479 tree decl;
6481 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6482 && (decl = mathfn_built_in (newtype, fcode)))
6483 return build_call_expr_loc (loc, decl, 1,
6484 fold_convert_loc (loc, newtype, arg0));
6487 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6488 sizeof (long long) == sizeof (long). */
6489 if (TYPE_PRECISION (long_long_integer_type_node)
6490 == TYPE_PRECISION (long_integer_type_node))
6492 tree newfn = NULL_TREE;
6493 switch (fcode)
6495 CASE_FLT_FN (BUILT_IN_LLCEIL):
6496 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6497 break;
6499 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6500 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6501 break;
6503 CASE_FLT_FN (BUILT_IN_LLROUND):
6504 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6505 break;
6507 CASE_FLT_FN (BUILT_IN_LLRINT):
6508 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6509 break;
6511 default:
6512 break;
6515 if (newfn)
6517 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6518 return fold_convert_loc (loc,
6519 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6523 return NULL_TREE;
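/* Illustrative examples of the two rewrites above, not part of GCC itself:
   assuming f has type float and a target where long and long long have the
   same precision,

     lround ((double) f)   becomes   lroundf (f)
     llround (x)           becomes   (long long) lround (x)
*/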
6526 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6527 return type. Return NULL_TREE if no simplification can be made. */
6529 static tree
6530 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6532 tree res;
6534 if (!validate_arg (arg, COMPLEX_TYPE)
6535 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6536 return NULL_TREE;
6538 /* Calculate the result when the argument is a constant. */
6539 if (TREE_CODE (arg) == COMPLEX_CST
6540 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6541 type, mpfr_hypot)))
6542 return res;
6544 if (TREE_CODE (arg) == COMPLEX_EXPR)
6546 tree real = TREE_OPERAND (arg, 0);
6547 tree imag = TREE_OPERAND (arg, 1);
6549 /* If either part is zero, cabs is fabs of the other. */
6550 if (real_zerop (real))
6551 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6552 if (real_zerop (imag))
6553 return fold_build1_loc (loc, ABS_EXPR, type, real);
6555 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6556 if (flag_unsafe_math_optimizations
6557 && operand_equal_p (real, imag, OEP_PURE_SAME))
6559 const REAL_VALUE_TYPE sqrt2_trunc
6560 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6561 STRIP_NOPS (real);
6562 return fold_build2_loc (loc, MULT_EXPR, type,
6563 fold_build1_loc (loc, ABS_EXPR, type, real),
6564 build_real (type, sqrt2_trunc));
6568 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6569 if (TREE_CODE (arg) == NEGATE_EXPR
6570 || TREE_CODE (arg) == CONJ_EXPR)
6571 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6573 /* Don't do this when optimizing for size. */
6574 if (flag_unsafe_math_optimizations
6575 && optimize && optimize_function_for_speed_p (cfun))
6577 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6579 if (sqrtfn != NULL_TREE)
6581 tree rpart, ipart, result;
6583 arg = builtin_save_expr (arg);
6585 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6586 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6588 rpart = builtin_save_expr (rpart);
6589 ipart = builtin_save_expr (ipart);
6591 result = fold_build2_loc (loc, PLUS_EXPR, type,
6592 fold_build2_loc (loc, MULT_EXPR, type,
6593 rpart, rpart),
6594 fold_build2_loc (loc, MULT_EXPR, type,
6595 ipart, ipart));
6597 return build_call_expr_loc (loc, sqrtfn, 1, result);
6601 return NULL_TREE;
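/* Brief justification for the transforms above (illustrative, not part of
   GCC itself): cabs (r + i*I) == sqrt (r*r + i*i).  Hence when r == i the
   value is sqrt (2*r*r) == fabs (r) * sqrt (2), and when expanding is
   worthwhile the call is rewritten as sqrt (r*r + i*i) directly.  */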
6604 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6605 complex tree type of the result. If NEG is true, the imaginary
6606 zero is negative. */
6608 static tree
6609 build_complex_cproj (tree type, bool neg)
6611 REAL_VALUE_TYPE rinf, rzero = dconst0;
6613 real_inf (&rinf);
6614 rzero.sign = neg;
6615 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6616 build_real (TREE_TYPE (type), rzero));
6619 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6620 return type. Return NULL_TREE if no simplification can be made. */
6622 static tree
6623 fold_builtin_cproj (location_t loc, tree arg, tree type)
6625 if (!validate_arg (arg, COMPLEX_TYPE)
6626 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6627 return NULL_TREE;
6629 /* If there are no infinities, return arg. */
6630 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
6631 return non_lvalue_loc (loc, arg);
6633 /* Calculate the result when the argument is a constant. */
6634 if (TREE_CODE (arg) == COMPLEX_CST)
6636 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
6637 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
6639 if (real_isinf (real) || real_isinf (imag))
6640 return build_complex_cproj (type, imag->sign);
6641 else
6642 return arg;
6644 else if (TREE_CODE (arg) == COMPLEX_EXPR)
6646 tree real = TREE_OPERAND (arg, 0);
6647 tree imag = TREE_OPERAND (arg, 1);
6649 STRIP_NOPS (real);
6650 STRIP_NOPS (imag);
6652 /* If the real part is inf and the imag part is known to be
6653 nonnegative, return (inf + 0i). Remember side-effects are
6654 possible in the imag part. */
6655 if (TREE_CODE (real) == REAL_CST
6656 && real_isinf (TREE_REAL_CST_PTR (real))
6657 && tree_expr_nonnegative_p (imag))
6658 return omit_one_operand_loc (loc, type,
6659 build_complex_cproj (type, false),
6660 arg);
6662 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
6663 Remember side-effects are possible in the real part. */
6664 if (TREE_CODE (imag) == REAL_CST
6665 && real_isinf (TREE_REAL_CST_PTR (imag)))
6666 return
6667 omit_one_operand_loc (loc, type,
6668 build_complex_cproj (type, TREE_REAL_CST_PTR
6669 (imag)->sign), arg);
6672 return NULL_TREE;
6675 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6676 Return NULL_TREE if no simplification can be made. */
6678 static tree
6679 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6682 enum built_in_function fcode;
6683 tree res;
6685 if (!validate_arg (arg, REAL_TYPE))
6686 return NULL_TREE;
6688 /* Calculate the result when the argument is a constant. */
6689 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6690 return res;
6692 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6693 fcode = builtin_mathfn_code (arg);
6694 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6696 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6697 arg = fold_build2_loc (loc, MULT_EXPR, type,
6698 CALL_EXPR_ARG (arg, 0),
6699 build_real (type, dconsthalf));
6700 return build_call_expr_loc (loc, expfn, 1, arg);
6703 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6704 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6706 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6708 if (powfn)
6710 tree arg0 = CALL_EXPR_ARG (arg, 0);
6711 tree tree_root;
6712 /* The inner root was either sqrt or cbrt. */
6713 /* This was a conditional expression but it triggered a bug
6714 in Sun C 5.5. */
6715 REAL_VALUE_TYPE dconstroot;
6716 if (BUILTIN_SQRT_P (fcode))
6717 dconstroot = dconsthalf;
6718 else
6719 dconstroot = dconst_third ();
6721 /* Adjust for the outer root. */
6722 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6723 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6724 tree_root = build_real (type, dconstroot);
6725 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6729 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6730 if (flag_unsafe_math_optimizations
6731 && (fcode == BUILT_IN_POW
6732 || fcode == BUILT_IN_POWF
6733 || fcode == BUILT_IN_POWL))
6735 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6736 tree arg0 = CALL_EXPR_ARG (arg, 0);
6737 tree arg1 = CALL_EXPR_ARG (arg, 1);
6738 tree narg1;
6739 if (!tree_expr_nonnegative_p (arg0))
6740 arg0 = build1 (ABS_EXPR, type, arg0);
6741 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
6742 build_real (type, dconsthalf));
6743 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
6746 return NULL_TREE;
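/* Brief justification for the transforms above (illustrative, not part of
   GCC itself): with sqrt (x) == pow (x, 0.5),

     sqrt (expN (x))    == expN (x * 0.5)
     sqrt (Nroot (x))   == pow (x, 1/(2*N))
     sqrt (pow (x, y))  == pow (fabs (x), y * 0.5)
*/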
6749 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6750 Return NULL_TREE if no simplification can be made. */
6752 static tree
6753 fold_builtin_cbrt (location_t loc, tree arg, tree type)
6755 const enum built_in_function fcode = builtin_mathfn_code (arg);
6756 tree res;
6758 if (!validate_arg (arg, REAL_TYPE))
6759 return NULL_TREE;
6761 /* Calculate the result when the argument is a constant. */
6762 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
6763 return res;
6765 if (flag_unsafe_math_optimizations)
6767 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6768 if (BUILTIN_EXPONENT_P (fcode))
6770 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6771 const REAL_VALUE_TYPE third_trunc =
6772 real_value_truncate (TYPE_MODE (type), dconst_third ());
6773 arg = fold_build2_loc (loc, MULT_EXPR, type,
6774 CALL_EXPR_ARG (arg, 0),
6775 build_real (type, third_trunc));
6776 return build_call_expr_loc (loc, expfn, 1, arg);
6779 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6780 if (BUILTIN_SQRT_P (fcode))
6782 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6784 if (powfn)
6786 tree arg0 = CALL_EXPR_ARG (arg, 0);
6787 tree tree_root;
6788 REAL_VALUE_TYPE dconstroot = dconst_third ();
6790 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6791 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6792 tree_root = build_real (type, dconstroot);
6793 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6797 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6798 if (BUILTIN_CBRT_P (fcode))
6800 tree arg0 = CALL_EXPR_ARG (arg, 0);
6801 if (tree_expr_nonnegative_p (arg0))
6803 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6805 if (powfn)
6807 tree tree_root;
6808 REAL_VALUE_TYPE dconstroot;
6810 real_arithmetic (&dconstroot, MULT_EXPR,
6811 dconst_third_ptr (), dconst_third_ptr ());
6812 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6813 tree_root = build_real (type, dconstroot);
6814 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6819 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6820 if (fcode == BUILT_IN_POW
6821 || fcode == BUILT_IN_POWF
6822 || fcode == BUILT_IN_POWL)
6824 tree arg00 = CALL_EXPR_ARG (arg, 0);
6825 tree arg01 = CALL_EXPR_ARG (arg, 1);
6826 if (tree_expr_nonnegative_p (arg00))
6828 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6829 const REAL_VALUE_TYPE dconstroot
6830 = real_value_truncate (TYPE_MODE (type), dconst_third ());
6831 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
6832 build_real (type, dconstroot));
6833 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
6837 return NULL_TREE;
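/* Brief justification for the transforms above (illustrative, not part of
   GCC itself): with cbrt (x) == pow (x, 1./3.),

     cbrt (expN (x))    == expN (x / 3)
     cbrt (sqrt (x))    == pow (x, 1./6.)
     cbrt (cbrt (x))    == pow (x, 1./9.)    x nonnegative
     cbrt (pow (x, y))  == pow (x, y / 3)    x nonnegative
*/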
6840 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6841 TYPE is the type of the return value. Return NULL_TREE if no
6842 simplification can be made. */
6844 static tree
6845 fold_builtin_cos (location_t loc,
6846 tree arg, tree type, tree fndecl)
6848 tree res, narg;
6850 if (!validate_arg (arg, REAL_TYPE))
6851 return NULL_TREE;
6853 /* Calculate the result when the argument is a constant. */
6854 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6855 return res;
6857 /* Optimize cos(-x) into cos (x). */
6858 if ((narg = fold_strip_sign_ops (arg)))
6859 return build_call_expr_loc (loc, fndecl, 1, narg);
6861 return NULL_TREE;
6864 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6865 Return NULL_TREE if no simplification can be made. */
6867 static tree
6868 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6870 if (validate_arg (arg, REAL_TYPE))
6872 tree res, narg;
6874 /* Calculate the result when the argument is a constant. */
6875 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6876 return res;
6878 /* Optimize cosh(-x) into cosh (x). */
6879 if ((narg = fold_strip_sign_ops (arg)))
6880 return build_call_expr_loc (loc, fndecl, 1, narg);
6883 return NULL_TREE;
6886 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6887 argument ARG. TYPE is the type of the return value. Return
6888 NULL_TREE if no simplification can be made. */
6890 static tree
6891 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6892 bool hyper)
6894 if (validate_arg (arg, COMPLEX_TYPE)
6895 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6897 tree tmp;
6899 /* Calculate the result when the argument is a constant. */
6900 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6901 return tmp;
6903 /* Optimize fn(-x) into fn(x). */
6904 if ((tmp = fold_strip_sign_ops (arg)))
6905 return build_call_expr_loc (loc, fndecl, 1, tmp);
6908 return NULL_TREE;
6911 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6912 Return NULL_TREE if no simplification can be made. */
6914 static tree
6915 fold_builtin_tan (tree arg, tree type)
6917 enum built_in_function fcode;
6918 tree res;
6920 if (!validate_arg (arg, REAL_TYPE))
6921 return NULL_TREE;
6923 /* Calculate the result when the argument is a constant. */
6924 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
6925 return res;
6927 /* Optimize tan(atan(x)) = x. */
6928 fcode = builtin_mathfn_code (arg);
6929 if (flag_unsafe_math_optimizations
6930 && (fcode == BUILT_IN_ATAN
6931 || fcode == BUILT_IN_ATANF
6932 || fcode == BUILT_IN_ATANL))
6933 return CALL_EXPR_ARG (arg, 0);
6935 return NULL_TREE;
6938 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
6939 NULL_TREE if no simplification can be made. */
6941 static tree
6942 fold_builtin_sincos (location_t loc,
6943 tree arg0, tree arg1, tree arg2)
6945 tree type;
6946 tree res, fn, call;
6948 if (!validate_arg (arg0, REAL_TYPE)
6949 || !validate_arg (arg1, POINTER_TYPE)
6950 || !validate_arg (arg2, POINTER_TYPE))
6951 return NULL_TREE;
6953 type = TREE_TYPE (arg0);
6955 /* Calculate the result when the argument is a constant. */
6956 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
6957 return res;
6959 /* Canonicalize sincos to cexpi. */
6960 if (!TARGET_C99_FUNCTIONS)
6961 return NULL_TREE;
6962 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
6963 if (!fn)
6964 return NULL_TREE;
6966 call = build_call_expr_loc (loc, fn, 1, arg0);
6967 call = builtin_save_expr (call);
6969 return build2 (COMPOUND_EXPR, void_type_node,
6970 build2 (MODIFY_EXPR, void_type_node,
6971 build_fold_indirect_ref_loc (loc, arg1),
6972 build1 (IMAGPART_EXPR, type, call)),
6973 build2 (MODIFY_EXPR, void_type_node,
6974 build_fold_indirect_ref_loc (loc, arg2),
6975 build1 (REALPART_EXPR, type, call)));
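/* Illustrative sketch of the canonicalization above, not part of GCC
   itself.  cexpi is the internal builtin with cexpi (x) == cexp (I*x), so a
   call

     sincos (x, &s, &c);

   is turned into the equivalent of

     __complex__ double t = cexpi (x);   // cos (x) + I * sin (x)
     s = __imag__ t;
     c = __real__ t;
*/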
6978 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
6979 NULL_TREE if no simplification can be made. */
6981 static tree
6982 fold_builtin_cexp (location_t loc, tree arg0, tree type)
6984 tree rtype;
6985 tree realp, imagp, ifn;
6986 tree res;
6988 if (!validate_arg (arg0, COMPLEX_TYPE)
6989 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
6990 return NULL_TREE;
6992 /* Calculate the result when the argument is a constant. */
6993 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
6994 return res;
6996 rtype = TREE_TYPE (TREE_TYPE (arg0));
6998 /* In case we can figure out the real part of arg0 and it is constant zero,
6999 fold to cexpi. */
7000 if (!TARGET_C99_FUNCTIONS)
7001 return NULL_TREE;
7002 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7003 if (!ifn)
7004 return NULL_TREE;
7006 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7007 && real_zerop (realp))
7009 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7010 return build_call_expr_loc (loc, ifn, 1, narg);
7013 /* In case we can easily decompose real and imaginary parts, split cexp
7014 to exp (r) * cexpi (i). */
7015 if (flag_unsafe_math_optimizations
7016 && realp)
7018 tree rfn, rcall, icall;
7020 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7021 if (!rfn)
7022 return NULL_TREE;
7024 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7025 if (!imagp)
7026 return NULL_TREE;
7028 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7029 icall = builtin_save_expr (icall);
7030 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7031 rcall = builtin_save_expr (rcall);
7032 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7033 fold_build2_loc (loc, MULT_EXPR, rtype,
7034 rcall,
7035 fold_build1_loc (loc, REALPART_EXPR,
7036 rtype, icall)),
7037 fold_build2_loc (loc, MULT_EXPR, rtype,
7038 rcall,
7039 fold_build1_loc (loc, IMAGPART_EXPR,
7040 rtype, icall)));
7043 return NULL_TREE;
7046 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7047 Return NULL_TREE if no simplification can be made. */
7049 static tree
7050 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7052 if (!validate_arg (arg, REAL_TYPE))
7053 return NULL_TREE;
7055 /* Optimize trunc of constant value. */
7056 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7058 REAL_VALUE_TYPE r, x;
7059 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7061 x = TREE_REAL_CST (arg);
7062 real_trunc (&r, TYPE_MODE (type), &x);
7063 return build_real (type, r);
7066 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7069 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7070 Return NULL_TREE if no simplification can be made. */
7072 static tree
7073 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7075 if (!validate_arg (arg, REAL_TYPE))
7076 return NULL_TREE;
7078 /* Optimize floor of constant value. */
7079 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7081 REAL_VALUE_TYPE x;
7083 x = TREE_REAL_CST (arg);
7084 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7086 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7087 REAL_VALUE_TYPE r;
7089 real_floor (&r, TYPE_MODE (type), &x);
7090 return build_real (type, r);
7094 /* Fold floor (x) where x is nonnegative to trunc (x). */
7095 if (tree_expr_nonnegative_p (arg))
7097 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7098 if (truncfn)
7099 return build_call_expr_loc (loc, truncfn, 1, arg);
7102 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7105 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7106 Return NULL_TREE if no simplification can be made. */
7108 static tree
7109 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7111 if (!validate_arg (arg, REAL_TYPE))
7112 return NULL_TREE;
7114 /* Optimize ceil of constant value. */
7115 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7117 REAL_VALUE_TYPE x;
7119 x = TREE_REAL_CST (arg);
7120 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7122 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7123 REAL_VALUE_TYPE r;
7125 real_ceil (&r, TYPE_MODE (type), &x);
7126 return build_real (type, r);
7130 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7133 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7134 Return NULL_TREE if no simplification can be made. */
7136 static tree
7137 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7139 if (!validate_arg (arg, REAL_TYPE))
7140 return NULL_TREE;
7142 /* Optimize round of constant value. */
7143 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7145 REAL_VALUE_TYPE x;
7147 x = TREE_REAL_CST (arg);
7148 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7150 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7151 REAL_VALUE_TYPE r;
7153 real_round (&r, TYPE_MODE (type), &x);
7154 return build_real (type, r);
7158 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7161 /* Fold function call to builtin lround, lroundf or lroundl (or the
7162 corresponding long long versions) and other rounding functions. ARG
7163 is the argument to the call. Return NULL_TREE if no simplification
7164 can be made. */
7166 static tree
7167 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7169 if (!validate_arg (arg, REAL_TYPE))
7170 return NULL_TREE;
7172 /* Optimize lround of constant value. */
7173 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7175 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7177 if (real_isfinite (&x))
7179 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7180 tree ftype = TREE_TYPE (arg);
7181 double_int val;
7182 REAL_VALUE_TYPE r;
7184 switch (DECL_FUNCTION_CODE (fndecl))
7186 CASE_FLT_FN (BUILT_IN_LFLOOR):
7187 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7188 real_floor (&r, TYPE_MODE (ftype), &x);
7189 break;
7191 CASE_FLT_FN (BUILT_IN_LCEIL):
7192 CASE_FLT_FN (BUILT_IN_LLCEIL):
7193 real_ceil (&r, TYPE_MODE (ftype), &x);
7194 break;
7196 CASE_FLT_FN (BUILT_IN_LROUND):
7197 CASE_FLT_FN (BUILT_IN_LLROUND):
7198 real_round (&r, TYPE_MODE (ftype), &x);
7199 break;
7201 default:
7202 gcc_unreachable ();
7205 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7206 if (double_int_fits_to_tree_p (itype, val))
7207 return double_int_to_tree (itype, val);
7211 switch (DECL_FUNCTION_CODE (fndecl))
7213 CASE_FLT_FN (BUILT_IN_LFLOOR):
7214 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7215 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7216 if (tree_expr_nonnegative_p (arg))
7217 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7218 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7219 break;
7220 default:;
7223 return fold_fixed_mathfn (loc, fndecl, arg);
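/* Illustrative examples of the integer-rounding folds above, assuming a
   finite constant argument whose result fits the return type:
   __builtin_lfloor (2.75) folds to 2, __builtin_lceil (2.25) to 3, and
   __builtin_lround (2.5) to 3 (halfway cases round away from zero).
   When the argument is merely known to be nonnegative, lfloor (x) is
   reduced to a plain FIX_TRUNC_EXPR conversion.  */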
7226 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7227 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7228 the argument to the call. Return NULL_TREE if no simplification can
7229 be made. */
7231 static tree
7232 fold_builtin_bitop (tree fndecl, tree arg)
7234 if (!validate_arg (arg, INTEGER_TYPE))
7235 return NULL_TREE;
7237 /* Optimize for constant argument. */
7238 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7240 HOST_WIDE_INT hi, width, result;
7241 unsigned HOST_WIDE_INT lo;
7242 tree type;
7244 type = TREE_TYPE (arg);
7245 width = TYPE_PRECISION (type);
7246 lo = TREE_INT_CST_LOW (arg);
7248 /* Clear all the bits that are beyond the type's precision. */
7249 if (width > HOST_BITS_PER_WIDE_INT)
7251 hi = TREE_INT_CST_HIGH (arg);
7252 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7253 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7255 else
7257 hi = 0;
7258 if (width < HOST_BITS_PER_WIDE_INT)
7259 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7262 switch (DECL_FUNCTION_CODE (fndecl))
7264 CASE_INT_FN (BUILT_IN_FFS):
7265 if (lo != 0)
7266 result = ffs_hwi (lo);
7267 else if (hi != 0)
7268 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7269 else
7270 result = 0;
7271 break;
7273 CASE_INT_FN (BUILT_IN_CLZ):
7274 if (hi != 0)
7275 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7276 else if (lo != 0)
7277 result = width - floor_log2 (lo) - 1;
7278 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7279 result = width;
7280 break;
7282 CASE_INT_FN (BUILT_IN_CTZ):
7283 if (lo != 0)
7284 result = ctz_hwi (lo);
7285 else if (hi != 0)
7286 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7287 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7288 result = width;
7289 break;
7291 CASE_INT_FN (BUILT_IN_POPCOUNT):
7292 result = 0;
7293 while (lo)
7294 result++, lo &= lo - 1;
7295 while (hi)
7296 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7297 break;
7299 CASE_INT_FN (BUILT_IN_PARITY):
7300 result = 0;
7301 while (lo)
7302 result++, lo &= lo - 1;
7303 while (hi)
7304 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7305 result &= 1;
7306 break;
7308 default:
7309 gcc_unreachable ();
7312 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7315 return NULL_TREE;
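/* Illustrative constant folds performed above, assuming a 32-bit int
   argument: __builtin_ffs (8) folds to 4, __builtin_clz (1) to 31,
   __builtin_ctz (8) to 3, __builtin_popcount (0xff) to 8 and
   __builtin_parity (7) to 1.  A zero argument of clz/ctz folds to the
   bit width unless the target defines a different value at zero via
   CLZ/CTZ_DEFINED_VALUE_AT_ZERO.  */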
7318 /* Fold function call to builtin_bswap and the long and long long
7319 variants. Return NULL_TREE if no simplification can be made. */
7320 static tree
7321 fold_builtin_bswap (tree fndecl, tree arg)
7323 if (! validate_arg (arg, INTEGER_TYPE))
7324 return NULL_TREE;
7326 /* Optimize constant value. */
7327 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7329 HOST_WIDE_INT hi, width, r_hi = 0;
7330 unsigned HOST_WIDE_INT lo, r_lo = 0;
7331 tree type;
7333 type = TREE_TYPE (arg);
7334 width = TYPE_PRECISION (type);
7335 lo = TREE_INT_CST_LOW (arg);
7336 hi = TREE_INT_CST_HIGH (arg);
7338 switch (DECL_FUNCTION_CODE (fndecl))
7340 case BUILT_IN_BSWAP32:
7341 case BUILT_IN_BSWAP64:
7343 int s;
7345 for (s = 0; s < width; s += 8)
7347 int d = width - s - 8;
7348 unsigned HOST_WIDE_INT byte;
7350 if (s < HOST_BITS_PER_WIDE_INT)
7351 byte = (lo >> s) & 0xff;
7352 else
7353 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7355 if (d < HOST_BITS_PER_WIDE_INT)
7356 r_lo |= byte << d;
7357 else
7358 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7362 break;
7364 default:
7365 gcc_unreachable ();
7368 if (width < HOST_BITS_PER_WIDE_INT)
7369 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7370 else
7371 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7374 return NULL_TREE;
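/* For illustration, the constant folding above turns
   __builtin_bswap32 (0x12345678) into 0x78563412, and handles
   __builtin_bswap64 the same way, one byte at a time.  */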
7377 /* A subroutine of fold_builtin to fold the various logarithmic
7378 functions. Return NULL_TREE if no simplification can be made.
7379 FUNC is the corresponding MPFR logarithm function. */
7381 static tree
7382 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7383 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7385 if (validate_arg (arg, REAL_TYPE))
7387 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7388 tree res;
7389 const enum built_in_function fcode = builtin_mathfn_code (arg);
7391 /* Calculate the result when the argument is a constant. */
7392 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7393 return res;
7395 /* Special case, optimize logN(expN(x)) = x. */
7396 if (flag_unsafe_math_optimizations
7397 && ((func == mpfr_log
7398 && (fcode == BUILT_IN_EXP
7399 || fcode == BUILT_IN_EXPF
7400 || fcode == BUILT_IN_EXPL))
7401 || (func == mpfr_log2
7402 && (fcode == BUILT_IN_EXP2
7403 || fcode == BUILT_IN_EXP2F
7404 || fcode == BUILT_IN_EXP2L))
7405 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7406 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7408 /* Optimize logN(func()) for various exponential functions. We
7409 want to determine the value "x" and the power "exponent" in
7410 order to transform logN(x**exponent) into exponent*logN(x). */
7411 if (flag_unsafe_math_optimizations)
7413 tree exponent = 0, x = 0;
7415 switch (fcode)
7417 CASE_FLT_FN (BUILT_IN_EXP):
7418 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
7419 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7420 dconst_e ()));
7421 exponent = CALL_EXPR_ARG (arg, 0);
7422 break;
7423 CASE_FLT_FN (BUILT_IN_EXP2):
7424 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
7425 x = build_real (type, dconst2);
7426 exponent = CALL_EXPR_ARG (arg, 0);
7427 break;
7428 CASE_FLT_FN (BUILT_IN_EXP10):
7429 CASE_FLT_FN (BUILT_IN_POW10):
7430 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
7432 REAL_VALUE_TYPE dconst10;
7433 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7434 x = build_real (type, dconst10);
7436 exponent = CALL_EXPR_ARG (arg, 0);
7437 break;
7438 CASE_FLT_FN (BUILT_IN_SQRT):
7439 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
7440 x = CALL_EXPR_ARG (arg, 0);
7441 exponent = build_real (type, dconsthalf);
7442 break;
7443 CASE_FLT_FN (BUILT_IN_CBRT):
7444 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
7445 x = CALL_EXPR_ARG (arg, 0);
7446 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7447 dconst_third ()));
7448 break;
7449 CASE_FLT_FN (BUILT_IN_POW):
7450 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
7451 x = CALL_EXPR_ARG (arg, 0);
7452 exponent = CALL_EXPR_ARG (arg, 1);
7453 break;
7454 default:
7455 break;
7458 /* Now perform the optimization. */
7459 if (x && exponent)
7461 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7462 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7467 return NULL_TREE;
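/* Illustrative folds performed above, all guarded by
   -funsafe-math-optimizations except the MPFR constant evaluation:
   log (exp (x)) -> x, log2 (exp2 (x)) -> x, log10 (exp10 (x)) -> x,
   log (pow (x, y)) -> y * log (x), log (sqrt (x)) -> 0.5 * log (x)
   and log (cbrt (x)) -> (1.0/3.0) * log (x).  */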
7470 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7471 NULL_TREE if no simplification can be made. */
7473 static tree
7474 fold_builtin_hypot (location_t loc, tree fndecl,
7475 tree arg0, tree arg1, tree type)
7477 tree res, narg0, narg1;
7479 if (!validate_arg (arg0, REAL_TYPE)
7480 || !validate_arg (arg1, REAL_TYPE))
7481 return NULL_TREE;
7483 /* Calculate the result when the argument is a constant. */
7484 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7485 return res;
7487 /* If either argument to hypot has a negate or abs, strip that off.
7488 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7489 narg0 = fold_strip_sign_ops (arg0);
7490 narg1 = fold_strip_sign_ops (arg1);
7491 if (narg0 || narg1)
7493 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7494 narg1 ? narg1 : arg1);
7497 /* If either argument is zero, hypot is fabs of the other. */
7498 if (real_zerop (arg0))
7499 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7500 else if (real_zerop (arg1))
7501 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7503 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7504 if (flag_unsafe_math_optimizations
7505 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7507 const REAL_VALUE_TYPE sqrt2_trunc
7508 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7509 return fold_build2_loc (loc, MULT_EXPR, type,
7510 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7511 build_real (type, sqrt2_trunc));
7514 return NULL_TREE;
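/* Examples of the hypot folding above: a constant call such as
   hypot (3.0, 4.0) can be evaluated to 5.0 via MPFR, hypot (x, 0.0)
   becomes fabs (x), hypot (-x, fabs (y)) becomes hypot (x, y), and
   with -funsafe-math-optimizations hypot (x, x) becomes
   fabs (x) * sqrt (2).  */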
7518 /* Fold a builtin function call to pow, powf, or powl. Return
7519 NULL_TREE if no simplification can be made. */
7520 static tree
7521 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7523 tree res;
7525 if (!validate_arg (arg0, REAL_TYPE)
7526 || !validate_arg (arg1, REAL_TYPE))
7527 return NULL_TREE;
7529 /* Calculate the result when the argument is a constant. */
7530 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7531 return res;
7533 /* Optimize pow(1.0,y) = 1.0. */
7534 if (real_onep (arg0))
7535 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7537 if (TREE_CODE (arg1) == REAL_CST
7538 && !TREE_OVERFLOW (arg1))
7540 REAL_VALUE_TYPE cint;
7541 REAL_VALUE_TYPE c;
7542 HOST_WIDE_INT n;
7544 c = TREE_REAL_CST (arg1);
7546 /* Optimize pow(x,0.0) = 1.0. */
7547 if (REAL_VALUES_EQUAL (c, dconst0))
7548 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7549 arg0);
7551 /* Optimize pow(x,1.0) = x. */
7552 if (REAL_VALUES_EQUAL (c, dconst1))
7553 return arg0;
7555 /* Optimize pow(x,-1.0) = 1.0/x. */
7556 if (REAL_VALUES_EQUAL (c, dconstm1))
7557 return fold_build2_loc (loc, RDIV_EXPR, type,
7558 build_real (type, dconst1), arg0);
7560 /* Optimize pow(x,0.5) = sqrt(x). */
7561 if (flag_unsafe_math_optimizations
7562 && REAL_VALUES_EQUAL (c, dconsthalf))
7564 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7566 if (sqrtfn != NULL_TREE)
7567 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7570 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7571 if (flag_unsafe_math_optimizations)
7573 const REAL_VALUE_TYPE dconstroot
7574 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7576 if (REAL_VALUES_EQUAL (c, dconstroot))
7578 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7579 if (cbrtfn != NULL_TREE)
7580 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7584 /* Check for an integer exponent. */
7585 n = real_to_integer (&c);
7586 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7587 if (real_identical (&c, &cint))
7589 /* Attempt to evaluate pow at compile-time, unless this should
7590 raise an exception. */
7591 if (TREE_CODE (arg0) == REAL_CST
7592 && !TREE_OVERFLOW (arg0)
7593 && (n > 0
7594 || (!flag_trapping_math && !flag_errno_math)
7595 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7597 REAL_VALUE_TYPE x;
7598 bool inexact;
7600 x = TREE_REAL_CST (arg0);
7601 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7602 if (flag_unsafe_math_optimizations || !inexact)
7603 return build_real (type, x);
7606 /* Strip sign ops from even integer powers. */
7607 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7609 tree narg0 = fold_strip_sign_ops (arg0);
7610 if (narg0)
7611 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7616 if (flag_unsafe_math_optimizations)
7618 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7620 /* Optimize pow(expN(x),y) = expN(x*y). */
7621 if (BUILTIN_EXPONENT_P (fcode))
7623 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7624 tree arg = CALL_EXPR_ARG (arg0, 0);
7625 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7626 return build_call_expr_loc (loc, expfn, 1, arg);
7629 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7630 if (BUILTIN_SQRT_P (fcode))
7632 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7633 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7634 build_real (type, dconsthalf));
7635 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7638 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7639 if (BUILTIN_CBRT_P (fcode))
7641 tree arg = CALL_EXPR_ARG (arg0, 0);
7642 if (tree_expr_nonnegative_p (arg))
7644 const REAL_VALUE_TYPE dconstroot
7645 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7646 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7647 build_real (type, dconstroot));
7648 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7652 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7653 if (fcode == BUILT_IN_POW
7654 || fcode == BUILT_IN_POWF
7655 || fcode == BUILT_IN_POWL)
7657 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7658 if (tree_expr_nonnegative_p (arg00))
7660 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7661 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7662 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7667 return NULL_TREE;
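/* A few of the pow folds implemented above: pow (1.0, y) -> 1.0,
   pow (x, 0.0) -> 1.0, pow (x, 1.0) -> x, pow (x, -1.0) -> 1.0 / x,
   and constant integer exponents such as pow (2.0, 3.0) evaluate to
   8.0.  With -funsafe-math-optimizations, pow (x, 0.5) -> sqrt (x),
   pow (sqrt (x), y) -> pow (x, y * 0.5), and pow (pow (x, y), z)
   -> pow (x, y * z) when x is known nonnegative.  */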
7670 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
7671 Return NULL_TREE if no simplification can be made. */
7672 static tree
7673 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7674 tree arg0, tree arg1, tree type)
7676 if (!validate_arg (arg0, REAL_TYPE)
7677 || !validate_arg (arg1, INTEGER_TYPE))
7678 return NULL_TREE;
7680 /* Optimize pow(1.0,y) = 1.0. */
7681 if (real_onep (arg0))
7682 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7684 if (host_integerp (arg1, 0))
7686 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7688 /* Evaluate powi at compile-time. */
7689 if (TREE_CODE (arg0) == REAL_CST
7690 && !TREE_OVERFLOW (arg0))
7692 REAL_VALUE_TYPE x;
7693 x = TREE_REAL_CST (arg0);
7694 real_powi (&x, TYPE_MODE (type), &x, c);
7695 return build_real (type, x);
7698 /* Optimize pow(x,0) = 1.0. */
7699 if (c == 0)
7700 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7701 arg0);
7703 /* Optimize pow(x,1) = x. */
7704 if (c == 1)
7705 return arg0;
7707 /* Optimize pow(x,-1) = 1.0/x. */
7708 if (c == -1)
7709 return fold_build2_loc (loc, RDIV_EXPR, type,
7710 build_real (type, dconst1), arg0);
7713 return NULL_TREE;
7716 /* A subroutine of fold_builtin to fold the various exponent
7717 functions. Return NULL_TREE if no simplification can be made.
7718 FUNC is the corresponding MPFR exponent function. */
7720 static tree
7721 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7722 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7724 if (validate_arg (arg, REAL_TYPE))
7726 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7727 tree res;
7729 /* Calculate the result when the argument is a constant. */
7730 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7731 return res;
7733 /* Optimize expN(logN(x)) = x. */
7734 if (flag_unsafe_math_optimizations)
7736 const enum built_in_function fcode = builtin_mathfn_code (arg);
7738 if ((func == mpfr_exp
7739 && (fcode == BUILT_IN_LOG
7740 || fcode == BUILT_IN_LOGF
7741 || fcode == BUILT_IN_LOGL))
7742 || (func == mpfr_exp2
7743 && (fcode == BUILT_IN_LOG2
7744 || fcode == BUILT_IN_LOG2F
7745 || fcode == BUILT_IN_LOG2L))
7746 || (func == mpfr_exp10
7747 && (fcode == BUILT_IN_LOG10
7748 || fcode == BUILT_IN_LOG10F
7749 || fcode == BUILT_IN_LOG10L)))
7750 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7754 return NULL_TREE;
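/* For illustration, with -funsafe-math-optimizations the folder above
   rewrites exp (log (x)) -> x, exp2 (log2 (x)) -> x and
   exp10 (log10 (x)) -> x; a constant argument is instead evaluated
   through the given MPFR function.  */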
7757 /* Return true if VAR is a VAR_DECL or a component thereof. */
7759 static bool
7760 var_decl_component_p (tree var)
7762 tree inner = var;
7763 while (handled_component_p (inner))
7764 inner = TREE_OPERAND (inner, 0);
7765 return SSA_VAR_P (inner);
7768 /* Fold function call to builtin memset. Return
7769 NULL_TREE if no simplification can be made. */
7771 static tree
7772 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
7773 tree type, bool ignore)
7775 tree var, ret, etype;
7776 unsigned HOST_WIDE_INT length, cval;
7778 if (! validate_arg (dest, POINTER_TYPE)
7779 || ! validate_arg (c, INTEGER_TYPE)
7780 || ! validate_arg (len, INTEGER_TYPE))
7781 return NULL_TREE;
7783 if (! host_integerp (len, 1))
7784 return NULL_TREE;
7786 /* If the LEN parameter is zero, return DEST. */
7787 if (integer_zerop (len))
7788 return omit_one_operand_loc (loc, type, dest, c);
7790 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
7791 return NULL_TREE;
7793 var = dest;
7794 STRIP_NOPS (var);
7795 if (TREE_CODE (var) != ADDR_EXPR)
7796 return NULL_TREE;
7798 var = TREE_OPERAND (var, 0);
7799 if (TREE_THIS_VOLATILE (var))
7800 return NULL_TREE;
7802 etype = TREE_TYPE (var);
7803 if (TREE_CODE (etype) == ARRAY_TYPE)
7804 etype = TREE_TYPE (etype);
7806 if (!INTEGRAL_TYPE_P (etype)
7807 && !POINTER_TYPE_P (etype))
7808 return NULL_TREE;
7810 if (! var_decl_component_p (var))
7811 return NULL_TREE;
7813 length = tree_low_cst (len, 1);
7814 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
7815 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
7816 < length)
7817 return NULL_TREE;
7819 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
7820 return NULL_TREE;
7822 if (integer_zerop (c))
7823 cval = 0;
7824 else
7826 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
7827 return NULL_TREE;
7829 cval = TREE_INT_CST_LOW (c);
7830 cval &= 0xff;
7831 cval |= cval << 8;
7832 cval |= cval << 16;
7833 cval |= (cval << 31) << 1;
7836 ret = build_int_cst_type (etype, cval);
7837 var = build_fold_indirect_ref_loc (loc,
7838 fold_convert_loc (loc,
7839 build_pointer_type (etype),
7840 dest));
7841 ret = build2 (MODIFY_EXPR, etype, var, ret);
7842 if (ignore)
7843 return ret;
7845 return omit_one_operand_loc (loc, type, dest, ret);
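/* Example of the memset folding above: for "int i;",
   memset (&i, 0, sizeof (int)) becomes the plain store "i = 0", and a
   nonzero fill byte is replicated, so memset (&i, 0x2a, sizeof (int))
   stores 0x2a2a2a2a on a 32-bit int.  The length must match the size
   of the accessed scalar (or array-element) type and the destination
   must be sufficiently aligned.  */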
7848 /* Fold function call to builtin bzero. Return
7849 NULL_TREE if no simplification can be made. */
7851 static tree
7852 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
7854 if (! validate_arg (dest, POINTER_TYPE)
7855 || ! validate_arg (size, INTEGER_TYPE))
7856 return NULL_TREE;
7858 if (!ignore)
7859 return NULL_TREE;
7861 /* New argument list transforming bzero(ptr x, int y) to
7862 memset(ptr x, int 0, size_t y). This is done this way
7863 so that if it isn't expanded inline, we fall back to
7864 calling bzero instead of memset. */
7866 return fold_builtin_memset (loc, dest, integer_zero_node,
7867 fold_convert_loc (loc, sizetype, size),
7868 void_type_node, ignore);
7871 /* Fold function call to builtin mem{{,p}cpy,move}. Return
7872 NULL_TREE if no simplification can be made.
7873 If ENDP is 0, return DEST (like memcpy).
7874 If ENDP is 1, return DEST+LEN (like mempcpy).
7875 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
7876 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
7877 (memmove). */
7879 static tree
7880 fold_builtin_memory_op (location_t loc, tree dest, tree src,
7881 tree len, tree type, bool ignore, int endp)
7883 tree destvar, srcvar, expr;
7885 if (! validate_arg (dest, POINTER_TYPE)
7886 || ! validate_arg (src, POINTER_TYPE)
7887 || ! validate_arg (len, INTEGER_TYPE))
7888 return NULL_TREE;
7890 /* If the LEN parameter is zero, return DEST. */
7891 if (integer_zerop (len))
7892 return omit_one_operand_loc (loc, type, dest, src);
7894 /* If SRC and DEST are the same (and not volatile), return
7895 DEST{,+LEN,+LEN-1}. */
7896 if (operand_equal_p (src, dest, 0))
7897 expr = len;
7898 else
7900 tree srctype, desttype;
7901 unsigned int src_align, dest_align;
7902 tree off0;
7904 if (endp == 3)
7906 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
7907 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
7909 /* Both DEST and SRC must be pointer types.
7910 ??? This is what old code did. Is the testing for pointer types
7911 really mandatory?
7913 If either SRC is readonly or length is 1, we can use memcpy. */
7914 if (!dest_align || !src_align)
7915 return NULL_TREE;
7916 if (readonly_data_expr (src)
7917 || (host_integerp (len, 1)
7918 && (MIN (src_align, dest_align) / BITS_PER_UNIT
7919 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
7921 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7922 if (!fn)
7923 return NULL_TREE;
7924 return build_call_expr_loc (loc, fn, 3, dest, src, len);
7927 /* If *src and *dest can't overlap, optimize into memcpy as well. */
7928 if (TREE_CODE (src) == ADDR_EXPR
7929 && TREE_CODE (dest) == ADDR_EXPR)
7931 tree src_base, dest_base, fn;
7932 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
7933 HOST_WIDE_INT size = -1;
7934 HOST_WIDE_INT maxsize = -1;
7936 srcvar = TREE_OPERAND (src, 0);
7937 src_base = get_ref_base_and_extent (srcvar, &src_offset,
7938 &size, &maxsize);
7939 destvar = TREE_OPERAND (dest, 0);
7940 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
7941 &size, &maxsize);
7942 if (host_integerp (len, 1))
7943 maxsize = tree_low_cst (len, 1);
7944 else
7945 maxsize = -1;
7946 src_offset /= BITS_PER_UNIT;
7947 dest_offset /= BITS_PER_UNIT;
7948 if (SSA_VAR_P (src_base)
7949 && SSA_VAR_P (dest_base))
7951 if (operand_equal_p (src_base, dest_base, 0)
7952 && ranges_overlap_p (src_offset, maxsize,
7953 dest_offset, maxsize))
7954 return NULL_TREE;
7956 else if (TREE_CODE (src_base) == MEM_REF
7957 && TREE_CODE (dest_base) == MEM_REF)
7959 double_int off;
7960 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
7961 TREE_OPERAND (dest_base, 0), 0))
7962 return NULL_TREE;
7963 off = double_int_add (mem_ref_offset (src_base),
7964 shwi_to_double_int (src_offset));
7965 if (!double_int_fits_in_shwi_p (off))
7966 return NULL_TREE;
7967 src_offset = off.low;
7968 off = double_int_add (mem_ref_offset (dest_base),
7969 shwi_to_double_int (dest_offset));
7970 if (!double_int_fits_in_shwi_p (off))
7971 return NULL_TREE;
7972 dest_offset = off.low;
7973 if (ranges_overlap_p (src_offset, maxsize,
7974 dest_offset, maxsize))
7975 return NULL_TREE;
7977 else
7978 return NULL_TREE;
7980 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7981 if (!fn)
7982 return NULL_TREE;
7983 return build_call_expr_loc (loc, fn, 3, dest, src, len);
7986 /* If the destination and source do not alias, optimize into
7987 memcpy as well. */
7988 if ((is_gimple_min_invariant (dest)
7989 || TREE_CODE (dest) == SSA_NAME)
7990 && (is_gimple_min_invariant (src)
7991 || TREE_CODE (src) == SSA_NAME))
7993 ao_ref destr, srcr;
7994 ao_ref_init_from_ptr_and_size (&destr, dest, len);
7995 ao_ref_init_from_ptr_and_size (&srcr, src, len);
7996 if (!refs_may_alias_p_1 (&destr, &srcr, false))
7998 tree fn;
7999 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8000 if (!fn)
8001 return NULL_TREE;
8002 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8006 return NULL_TREE;
8009 if (!host_integerp (len, 0))
8010 return NULL_TREE;
8011 /* FIXME:
8012 This logic loses for arguments like (type *)malloc (sizeof (type)),
8013 since we strip the casts off the VOID return value from malloc.
8014 Perhaps we ought to inherit the type from the non-VOID argument here? */
8015 STRIP_NOPS (src);
8016 STRIP_NOPS (dest);
8017 if (!POINTER_TYPE_P (TREE_TYPE (src))
8018 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8019 return NULL_TREE;
8020 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8021 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8023 tree tem = TREE_OPERAND (src, 0);
8024 STRIP_NOPS (tem);
8025 if (tem != TREE_OPERAND (src, 0))
8026 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8028 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8030 tree tem = TREE_OPERAND (dest, 0);
8031 STRIP_NOPS (tem);
8032 if (tem != TREE_OPERAND (dest, 0))
8033 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8035 srctype = TREE_TYPE (TREE_TYPE (src));
8036 if (TREE_CODE (srctype) == ARRAY_TYPE
8037 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8039 srctype = TREE_TYPE (srctype);
8040 STRIP_NOPS (src);
8041 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8043 desttype = TREE_TYPE (TREE_TYPE (dest));
8044 if (TREE_CODE (desttype) == ARRAY_TYPE
8045 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8047 desttype = TREE_TYPE (desttype);
8048 STRIP_NOPS (dest);
8049 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8051 if (TREE_ADDRESSABLE (srctype)
8052 || TREE_ADDRESSABLE (desttype))
8053 return NULL_TREE;
8055 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8056 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8057 if (dest_align < TYPE_ALIGN (desttype)
8058 || src_align < TYPE_ALIGN (srctype))
8059 return NULL_TREE;
8061 if (!ignore)
8062 dest = builtin_save_expr (dest);
8064 /* Build accesses at offset zero with a ref-all character type. */
8065 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8066 ptr_mode, true), 0);
8068 destvar = dest;
8069 STRIP_NOPS (destvar);
8070 if (TREE_CODE (destvar) == ADDR_EXPR
8071 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8072 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8073 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8074 else
8075 destvar = NULL_TREE;
8077 srcvar = src;
8078 STRIP_NOPS (srcvar);
8079 if (TREE_CODE (srcvar) == ADDR_EXPR
8080 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8081 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8083 if (!destvar
8084 || src_align >= TYPE_ALIGN (desttype))
8085 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8086 srcvar, off0);
8087 else if (!STRICT_ALIGNMENT)
8089 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8090 src_align);
8091 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8093 else
8094 srcvar = NULL_TREE;
8096 else
8097 srcvar = NULL_TREE;
8099 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8100 return NULL_TREE;
8102 if (srcvar == NULL_TREE)
8104 STRIP_NOPS (src);
8105 if (src_align >= TYPE_ALIGN (desttype))
8106 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8107 else
8109 if (STRICT_ALIGNMENT)
8110 return NULL_TREE;
8111 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8112 src_align);
8113 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8116 else if (destvar == NULL_TREE)
8118 STRIP_NOPS (dest);
8119 if (dest_align >= TYPE_ALIGN (srctype))
8120 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8121 else
8123 if (STRICT_ALIGNMENT)
8124 return NULL_TREE;
8125 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8126 dest_align);
8127 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8131 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8134 if (ignore)
8135 return expr;
8137 if (endp == 0 || endp == 3)
8138 return omit_one_operand_loc (loc, type, dest, expr);
8140 if (expr == len)
8141 expr = NULL_TREE;
8143 if (endp == 2)
8144 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8145 ssize_int (1));
8147 len = fold_convert_loc (loc, sizetype, len);
8148 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8149 dest = fold_convert_loc (loc, type, dest);
8150 if (expr)
8151 dest = omit_one_operand_loc (loc, type, dest, expr);
8152 return dest;
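/* Illustrative cases handled above for memcpy/mempcpy/memmove: a zero
   length folds to DEST, copying an object onto itself folds to DEST,
   memmove from read-only data degrades to memcpy, and a whole-variable
   copy such as memcpy (&a, &b, sizeof (a)) for two like-typed, suitably
   aligned variables becomes a direct assignment through MEM_REFs.  For
   mempcpy the value returned is DEST + LEN.  */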
8155 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8156 If LEN is not NULL, it represents the length of the string to be
8157 copied. Return NULL_TREE if no simplification can be made. */
8159 tree
8160 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8162 tree fn;
8164 if (!validate_arg (dest, POINTER_TYPE)
8165 || !validate_arg (src, POINTER_TYPE))
8166 return NULL_TREE;
8168 /* If SRC and DEST are the same (and not volatile), return DEST. */
8169 if (operand_equal_p (src, dest, 0))
8170 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8172 if (optimize_function_for_size_p (cfun))
8173 return NULL_TREE;
8175 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8176 if (!fn)
8177 return NULL_TREE;
8179 if (!len)
8181 len = c_strlen (src, 1);
8182 if (! len || TREE_SIDE_EFFECTS (len))
8183 return NULL_TREE;
8186 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8187 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8188 build_call_expr_loc (loc, fn, 3, dest, src, len));
8191 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8192 Return NULL_TREE if no simplification can be made. */
8194 static tree
8195 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8197 tree fn, len, lenp1, call, type;
8199 if (!validate_arg (dest, POINTER_TYPE)
8200 || !validate_arg (src, POINTER_TYPE))
8201 return NULL_TREE;
8203 len = c_strlen (src, 1);
8204 if (!len
8205 || TREE_CODE (len) != INTEGER_CST)
8206 return NULL_TREE;
8208 if (optimize_function_for_size_p (cfun)
8209 /* If length is zero it's small enough. */
8210 && !integer_zerop (len))
8211 return NULL_TREE;
8213 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8214 if (!fn)
8215 return NULL_TREE;
8217 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8218 /* We use dest twice in building our expression. Save it from
8219 multiple expansions. */
8220 dest = builtin_save_expr (dest);
8221 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8223 type = TREE_TYPE (TREE_TYPE (fndecl));
8224 len = fold_convert_loc (loc, sizetype, len);
8225 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8226 dest = fold_convert_loc (loc, type, dest);
8227 dest = omit_one_operand_loc (loc, type, dest, call);
8228 return dest;
8231 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8232 If SLEN is not NULL, it represents the length of the source string.
8233 Return NULL_TREE if no simplification can be made. */
8235 tree
8236 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8237 tree src, tree len, tree slen)
8239 tree fn;
8241 if (!validate_arg (dest, POINTER_TYPE)
8242 || !validate_arg (src, POINTER_TYPE)
8243 || !validate_arg (len, INTEGER_TYPE))
8244 return NULL_TREE;
8246 /* If the LEN parameter is zero, return DEST. */
8247 if (integer_zerop (len))
8248 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8250 /* We can't compare slen with len as constants below if len is not a
8251 constant. */
8252 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8253 return NULL_TREE;
8255 if (!slen)
8256 slen = c_strlen (src, 1);
8258 /* Now, we must be passed a constant src ptr parameter. */
8259 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8260 return NULL_TREE;
8262 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8264 /* We do not support simplification of this case, though we do
8265 support it when expanding trees into RTL. */
8266 /* FIXME: generate a call to __builtin_memset. */
8267 if (tree_int_cst_lt (slen, len))
8268 return NULL_TREE;
8270 /* OK, transform into builtin memcpy. */
8271 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8272 if (!fn)
8273 return NULL_TREE;
8274 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8275 build_call_expr_loc (loc, fn, 3, dest, src, len));
8278 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8279 arguments to the call, and TYPE is its return type.
8280 Return NULL_TREE if no simplification can be made. */
8282 static tree
8283 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8285 if (!validate_arg (arg1, POINTER_TYPE)
8286 || !validate_arg (arg2, INTEGER_TYPE)
8287 || !validate_arg (len, INTEGER_TYPE))
8288 return NULL_TREE;
8289 else
8291 const char *p1;
8293 if (TREE_CODE (arg2) != INTEGER_CST
8294 || !host_integerp (len, 1))
8295 return NULL_TREE;
8297 p1 = c_getstr (arg1);
8298 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8300 char c;
8301 const char *r;
8302 tree tem;
8304 if (target_char_cast (arg2, &c))
8305 return NULL_TREE;
8307 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8309 if (r == NULL)
8310 return build_int_cst (TREE_TYPE (arg1), 0);
8312 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8313 size_int (r - p1));
8314 return fold_convert_loc (loc, type, tem);
8316 return NULL_TREE;
8320 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8321 Return NULL_TREE if no simplification can be made. */
8323 static tree
8324 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8326 const char *p1, *p2;
8328 if (!validate_arg (arg1, POINTER_TYPE)
8329 || !validate_arg (arg2, POINTER_TYPE)
8330 || !validate_arg (len, INTEGER_TYPE))
8331 return NULL_TREE;
8333 /* If the LEN parameter is zero, return zero. */
8334 if (integer_zerop (len))
8335 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8336 arg1, arg2);
8338 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8339 if (operand_equal_p (arg1, arg2, 0))
8340 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8342 p1 = c_getstr (arg1);
8343 p2 = c_getstr (arg2);
8345 /* If all arguments are constant, and the value of len is not greater
8346 than the lengths of arg1 and arg2, evaluate at compile-time. */
8347 if (host_integerp (len, 1) && p1 && p2
8348 && compare_tree_int (len, strlen (p1) + 1) <= 0
8349 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8351 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8353 if (r > 0)
8354 return integer_one_node;
8355 else if (r < 0)
8356 return integer_minus_one_node;
8357 else
8358 return integer_zero_node;
8361 /* If len parameter is one, return an expression corresponding to
8362 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8363 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8365 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8366 tree cst_uchar_ptr_node
8367 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8369 tree ind1
8370 = fold_convert_loc (loc, integer_type_node,
8371 build1 (INDIRECT_REF, cst_uchar_node,
8372 fold_convert_loc (loc,
8373 cst_uchar_ptr_node,
8374 arg1)));
8375 tree ind2
8376 = fold_convert_loc (loc, integer_type_node,
8377 build1 (INDIRECT_REF, cst_uchar_node,
8378 fold_convert_loc (loc,
8379 cst_uchar_ptr_node,
8380 arg2)));
8381 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8384 return NULL_TREE;
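/* Examples of the memcmp folding above: a zero length yields 0, equal
   arguments yield 0, constant strings are compared at compile time
   (e.g. memcmp ("ab", "ac", 2) folds to -1), and a length of one is
   lowered to the difference of the two bytes loaded through
   "const unsigned char *" pointers.  */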
8387 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8388 Return NULL_TREE if no simplification can be made. */
8390 static tree
8391 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8393 const char *p1, *p2;
8395 if (!validate_arg (arg1, POINTER_TYPE)
8396 || !validate_arg (arg2, POINTER_TYPE))
8397 return NULL_TREE;
8399 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8400 if (operand_equal_p (arg1, arg2, 0))
8401 return integer_zero_node;
8403 p1 = c_getstr (arg1);
8404 p2 = c_getstr (arg2);
8406 if (p1 && p2)
8408 const int i = strcmp (p1, p2);
8409 if (i < 0)
8410 return integer_minus_one_node;
8411 else if (i > 0)
8412 return integer_one_node;
8413 else
8414 return integer_zero_node;
8417 /* If the second arg is "", return *(const unsigned char*)arg1. */
8418 if (p2 && *p2 == '\0')
8420 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8421 tree cst_uchar_ptr_node
8422 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8424 return fold_convert_loc (loc, integer_type_node,
8425 build1 (INDIRECT_REF, cst_uchar_node,
8426 fold_convert_loc (loc,
8427 cst_uchar_ptr_node,
8428 arg1)));
8431 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8432 if (p1 && *p1 == '\0')
8434 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8435 tree cst_uchar_ptr_node
8436 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8438 tree temp
8439 = fold_convert_loc (loc, integer_type_node,
8440 build1 (INDIRECT_REF, cst_uchar_node,
8441 fold_convert_loc (loc,
8442 cst_uchar_ptr_node,
8443 arg2)));
8444 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8447 return NULL_TREE;
8450 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8451 Return NULL_TREE if no simplification can be made. */
8453 static tree
8454 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8456 const char *p1, *p2;
8458 if (!validate_arg (arg1, POINTER_TYPE)
8459 || !validate_arg (arg2, POINTER_TYPE)
8460 || !validate_arg (len, INTEGER_TYPE))
8461 return NULL_TREE;
8463 /* If the LEN parameter is zero, return zero. */
8464 if (integer_zerop (len))
8465 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8466 arg1, arg2);
8468 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8469 if (operand_equal_p (arg1, arg2, 0))
8470 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8472 p1 = c_getstr (arg1);
8473 p2 = c_getstr (arg2);
8475 if (host_integerp (len, 1) && p1 && p2)
8477 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8478 if (i > 0)
8479 return integer_one_node;
8480 else if (i < 0)
8481 return integer_minus_one_node;
8482 else
8483 return integer_zero_node;
8486 /* If the second arg is "", and the length is greater than zero,
8487 return *(const unsigned char*)arg1. */
8488 if (p2 && *p2 == '\0'
8489 && TREE_CODE (len) == INTEGER_CST
8490 && tree_int_cst_sgn (len) == 1)
8492 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8493 tree cst_uchar_ptr_node
8494 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8496 return fold_convert_loc (loc, integer_type_node,
8497 build1 (INDIRECT_REF, cst_uchar_node,
8498 fold_convert_loc (loc,
8499 cst_uchar_ptr_node,
8500 arg1)));
8503 /* If the first arg is "", and the length is greater than zero,
8504 return -*(const unsigned char*)arg2. */
8505 if (p1 && *p1 == '\0'
8506 && TREE_CODE (len) == INTEGER_CST
8507 && tree_int_cst_sgn (len) == 1)
8509 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8510 tree cst_uchar_ptr_node
8511 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8513 tree temp = fold_convert_loc (loc, integer_type_node,
8514 build1 (INDIRECT_REF, cst_uchar_node,
8515 fold_convert_loc (loc,
8516 cst_uchar_ptr_node,
8517 arg2)));
8518 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8521 /* If len parameter is one, return an expression corresponding to
8522 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8523 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8525 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8526 tree cst_uchar_ptr_node
8527 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8529 tree ind1 = fold_convert_loc (loc, integer_type_node,
8530 build1 (INDIRECT_REF, cst_uchar_node,
8531 fold_convert_loc (loc,
8532 cst_uchar_ptr_node,
8533 arg1)));
8534 tree ind2 = fold_convert_loc (loc, integer_type_node,
8535 build1 (INDIRECT_REF, cst_uchar_node,
8536 fold_convert_loc (loc,
8537 cst_uchar_ptr_node,
8538 arg2)));
8539 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8542 return NULL_TREE;
8545 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8546 ARG. Return NULL_TREE if no simplification can be made. */
8548 static tree
8549 fold_builtin_signbit (location_t loc, tree arg, tree type)
8551 if (!validate_arg (arg, REAL_TYPE))
8552 return NULL_TREE;
8554 /* If ARG is a compile-time constant, determine the result. */
8555 if (TREE_CODE (arg) == REAL_CST
8556 && !TREE_OVERFLOW (arg))
8558 REAL_VALUE_TYPE c;
8560 c = TREE_REAL_CST (arg);
8561 return (REAL_VALUE_NEGATIVE (c)
8562 ? build_one_cst (type)
8563 : build_zero_cst (type));
8566 /* If ARG is non-negative, the result is always zero. */
8567 if (tree_expr_nonnegative_p (arg))
8568 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8570 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8571 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8572 return fold_build2_loc (loc, LT_EXPR, type, arg,
8573 build_real (TREE_TYPE (arg), dconst0));
8575 return NULL_TREE;
8578 /* Fold function call to builtin copysign, copysignf or copysignl with
8579 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8580 be made. */
8582 static tree
8583 fold_builtin_copysign (location_t loc, tree fndecl,
8584 tree arg1, tree arg2, tree type)
8586 tree tem;
8588 if (!validate_arg (arg1, REAL_TYPE)
8589 || !validate_arg (arg2, REAL_TYPE))
8590 return NULL_TREE;
8592 /* copysign(X,X) is X. */
8593 if (operand_equal_p (arg1, arg2, 0))
8594 return fold_convert_loc (loc, type, arg1);
8596 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8597 if (TREE_CODE (arg1) == REAL_CST
8598 && TREE_CODE (arg2) == REAL_CST
8599 && !TREE_OVERFLOW (arg1)
8600 && !TREE_OVERFLOW (arg2))
8602 REAL_VALUE_TYPE c1, c2;
8604 c1 = TREE_REAL_CST (arg1);
8605 c2 = TREE_REAL_CST (arg2);
8606 /* c1.sign := c2.sign. */
8607 real_copysign (&c1, &c2);
8608 return build_real (type, c1);
8611 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8612 Remember to evaluate Y for side-effects. */
8613 if (tree_expr_nonnegative_p (arg2))
8614 return omit_one_operand_loc (loc, type,
8615 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8616 arg2);
8618 /* Strip sign changing operations for the first argument. */
8619 tem = fold_strip_sign_ops (arg1);
8620 if (tem)
8621 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8623 return NULL_TREE;
8626 /* Fold a call to builtin isascii with argument ARG. */
8628 static tree
8629 fold_builtin_isascii (location_t loc, tree arg)
8631 if (!validate_arg (arg, INTEGER_TYPE))
8632 return NULL_TREE;
8633 else
8635 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8636 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8637 build_int_cst (integer_type_node,
8638 ~ (unsigned HOST_WIDE_INT) 0x7f));
8639 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8640 arg, integer_zero_node);
8644 /* Fold a call to builtin toascii with argument ARG. */
8646 static tree
8647 fold_builtin_toascii (location_t loc, tree arg)
8649 if (!validate_arg (arg, INTEGER_TYPE))
8650 return NULL_TREE;
8652 /* Transform toascii(c) -> (c & 0x7f). */
8653 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8654 build_int_cst (integer_type_node, 0x7f));
8657 /* Fold a call to builtin isdigit with argument ARG. */
8659 static tree
8660 fold_builtin_isdigit (location_t loc, tree arg)
8662 if (!validate_arg (arg, INTEGER_TYPE))
8663 return NULL_TREE;
8664 else
8666 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8667 /* According to the C standard, isdigit is unaffected by locale.
8668 However, it definitely is affected by the target character set. */
8669 unsigned HOST_WIDE_INT target_digit0
8670 = lang_hooks.to_target_charset ('0');
8672 if (target_digit0 == 0)
8673 return NULL_TREE;
8675 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8676 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8677 build_int_cst (unsigned_type_node, target_digit0));
8678 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8679 build_int_cst (unsigned_type_node, 9));
8683 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8685 static tree
8686 fold_builtin_fabs (location_t loc, tree arg, tree type)
8688 if (!validate_arg (arg, REAL_TYPE))
8689 return NULL_TREE;
8691 arg = fold_convert_loc (loc, type, arg);
8692 if (TREE_CODE (arg) == REAL_CST)
8693 return fold_abs_const (arg, type);
8694 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8697 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8699 static tree
8700 fold_builtin_abs (location_t loc, tree arg, tree type)
8702 if (!validate_arg (arg, INTEGER_TYPE))
8703 return NULL_TREE;
8705 arg = fold_convert_loc (loc, type, arg);
8706 if (TREE_CODE (arg) == INTEGER_CST)
8707 return fold_abs_const (arg, type);
8708 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8711 /* Fold a fma operation with arguments ARG[012]. */
8713 tree
8714 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8715 tree type, tree arg0, tree arg1, tree arg2)
8717 if (TREE_CODE (arg0) == REAL_CST
8718 && TREE_CODE (arg1) == REAL_CST
8719 && TREE_CODE (arg2) == REAL_CST)
8720 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8722 return NULL_TREE;
8725 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8727 static tree
8728 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8730 if (validate_arg (arg0, REAL_TYPE)
8731 && validate_arg(arg1, REAL_TYPE)
8732 && validate_arg(arg2, REAL_TYPE))
8734 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8735 if (tem)
8736 return tem;
8738 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8739 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8740 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8742 return NULL_TREE;
8745 /* Fold a call to builtin fmin or fmax. */
8747 static tree
8748 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8749 tree type, bool max)
8751 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8753 /* Calculate the result when the argument is a constant. */
8754 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8756 if (res)
8757 return res;
8759 /* If either argument is NaN, return the other one. Avoid the
8760 transformation if we get (and honor) a signalling NaN. Using
8761 omit_one_operand() ensures we create a non-lvalue. */
8762 if (TREE_CODE (arg0) == REAL_CST
8763 && real_isnan (&TREE_REAL_CST (arg0))
8764 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8765 || ! TREE_REAL_CST (arg0).signalling))
8766 return omit_one_operand_loc (loc, type, arg1, arg0);
8767 if (TREE_CODE (arg1) == REAL_CST
8768 && real_isnan (&TREE_REAL_CST (arg1))
8769 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8770 || ! TREE_REAL_CST (arg1).signalling))
8771 return omit_one_operand_loc (loc, type, arg0, arg1);
8773 /* Transform fmin/fmax(x,x) -> x. */
8774 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8775 return omit_one_operand_loc (loc, type, arg0, arg1);
8777 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8778 functions to return the numeric arg if the other one is NaN.
8779 These tree codes don't honor that, so only transform if
8780 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8781 handled, so we don't have to worry about it either. */
8782 if (flag_finite_math_only)
8783 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8784 fold_convert_loc (loc, type, arg0),
8785 fold_convert_loc (loc, type, arg1));
8787 return NULL_TREE;
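/* Illustrative fmin/fmax folds from above: constant arguments are
   evaluated with MPFR, fmax (x, x) folds to x, a quiet NaN argument is
   dropped in favor of the other operand, and with -ffinite-math-only
   the calls become MIN_EXPR/MAX_EXPR directly.  */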
8790 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8792 static tree
8793 fold_builtin_carg (location_t loc, tree arg, tree type)
8795 if (validate_arg (arg, COMPLEX_TYPE)
8796 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8798 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8800 if (atan2_fn)
8802 tree new_arg = builtin_save_expr (arg);
8803 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8804 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8805 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8809 return NULL_TREE;
8812 /* Fold a call to builtin logb/ilogb. */
8814 static tree
8815 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8817 if (! validate_arg (arg, REAL_TYPE))
8818 return NULL_TREE;
8820 STRIP_NOPS (arg);
8822 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8824 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8826 switch (value->cl)
8828 case rvc_nan:
8829 case rvc_inf:
8830 /* If arg is Inf or NaN and we're logb, return it. */
8831 if (TREE_CODE (rettype) == REAL_TYPE)
8832 return fold_convert_loc (loc, rettype, arg);
8833 /* Fall through... */
8834 case rvc_zero:
8835 /* Zero may set errno and/or raise an exception for logb; also,
8836 for ilogb we don't know FP_ILOGB0. */
8837 return NULL_TREE;
8838 case rvc_normal:
8839 /* For normal numbers, proceed iff radix == 2. In GCC,
8840 normalized significands are in the range [0.5, 1.0). We
8841 want the exponent as if they were [1.0, 2.0) so get the
8842 exponent and subtract 1. */
8843 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8844 return fold_convert_loc (loc, rettype,
8845 build_int_cst (integer_type_node,
8846 REAL_EXP (value)-1));
8847 break;
8851 return NULL_TREE;
8854 /* Fold a call to builtin significand, if radix == 2. */
8856 static tree
8857 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8859 if (! validate_arg (arg, REAL_TYPE))
8860 return NULL_TREE;
8862 STRIP_NOPS (arg);
8864 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8866 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8868 switch (value->cl)
8870 case rvc_zero:
8871 case rvc_nan:
8872 case rvc_inf:
8873 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8874 return fold_convert_loc (loc, rettype, arg);
8875 case rvc_normal:
8876 /* For normal numbers, proceed iff radix == 2. */
8877 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8879 REAL_VALUE_TYPE result = *value;
8880 /* In GCC, normalized significands are in the range [0.5,
8881 1.0). We want them to be [1.0, 2.0) so set the
8882 exponent to 1. */
8883 SET_REAL_EXP (&result, 1);
8884 return build_real (rettype, result);
8886 break;
8890 return NULL_TREE;
8893 /* Fold a call to builtin frexp, we can assume the base is 2. */
8895 static tree
8896 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8898 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8899 return NULL_TREE;
8901 STRIP_NOPS (arg0);
8903 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8904 return NULL_TREE;
8906 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8908 /* Proceed if a valid pointer type was passed in. */
8909 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8911 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8912 tree frac, exp;
8914 switch (value->cl)
8916 case rvc_zero:
8917 /* For +-0, return (*exp = 0, +-0). */
8918 exp = integer_zero_node;
8919 frac = arg0;
8920 break;
8921 case rvc_nan:
8922 case rvc_inf:
8923 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8924 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8925 case rvc_normal:
8927 /* Since the frexp function always expects base 2, and in
8928 GCC normalized significands are already in the range
8929 [0.5, 1.0), we have exactly what frexp wants. */
8930 REAL_VALUE_TYPE frac_rvt = *value;
8931 SET_REAL_EXP (&frac_rvt, 0);
8932 frac = build_real (rettype, frac_rvt);
8933 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8935 break;
8936 default:
8937 gcc_unreachable ();
8940 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8941 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8942 TREE_SIDE_EFFECTS (arg1) = 1;
8943 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8946 return NULL_TREE;
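/* Example of the frexp folding above: frexp (8.0, &e) becomes the
   COMPOUND_EXPR (*e = 4, 0.5), since 8.0 == 0.5 * 2**4; +-0.0 gives
   (*e = 0, +-0.0), and an Inf or NaN argument is returned unchanged
   with *e left unspecified.  */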
8949 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8950 then we can assume the base is two. If it's false, then we have to
8951 check the mode of the TYPE parameter in certain cases. */
8953 static tree
8954 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8955 tree type, bool ldexp)
8957 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8959 STRIP_NOPS (arg0);
8960 STRIP_NOPS (arg1);
8962 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8963 if (real_zerop (arg0) || integer_zerop (arg1)
8964 || (TREE_CODE (arg0) == REAL_CST
8965 && !real_isfinite (&TREE_REAL_CST (arg0))))
8966 return omit_one_operand_loc (loc, type, arg0, arg1);
8968 /* If both arguments are constant, then try to evaluate it. */
8969 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8970 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8971 && host_integerp (arg1, 0))
8973 /* Bound the maximum adjustment to twice the range of the
8974 mode's valid exponents. Use abs to ensure the range is
8975 positive as a sanity check. */
8976 const long max_exp_adj = 2 *
8977 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8978 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8980 /* Get the user-requested adjustment. */
8981 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
8983 /* The requested adjustment must be inside this range. This
8984 is a preliminary cap to avoid things like overflow, we
8985 may still fail to compute the result for other reasons. */
8986 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8988 REAL_VALUE_TYPE initial_result;
8990 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8992 /* Ensure we didn't overflow. */
8993 if (! real_isinf (&initial_result))
8995 const REAL_VALUE_TYPE trunc_result
8996 = real_value_truncate (TYPE_MODE (type), initial_result);
8998 /* Only proceed if the target mode can hold the
8999 resulting value. */
9000 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9001 return build_real (type, trunc_result);
9007 return NULL_TREE;
9010 /* Fold a call to builtin modf. */
9012 static tree
9013 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9015 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9016 return NULL_TREE;
9018 STRIP_NOPS (arg0);
9020 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9021 return NULL_TREE;
9023 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9025 /* Proceed if a valid pointer type was passed in. */
9026 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9028 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9029 REAL_VALUE_TYPE trunc, frac;
9031 switch (value->cl)
9033 case rvc_nan:
9034 case rvc_zero:
9035 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9036 trunc = frac = *value;
9037 break;
9038 case rvc_inf:
9039 /* For +-Inf, return (*arg1 = arg0, +-0). */
9040 frac = dconst0;
9041 frac.sign = value->sign;
9042 trunc = *value;
9043 break;
9044 case rvc_normal:
9045 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9046 real_trunc (&trunc, VOIDmode, value);
9047 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9048 /* If the original number was negative and already
9049 integral, then the fractional part is -0.0. */
9050 if (value->sign && frac.cl == rvc_zero)
9051 frac.sign = value->sign;
9052 break;
9055 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9056 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9057 build_real (rettype, trunc));
9058 TREE_SIDE_EFFECTS (arg1) = 1;
9059 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9060 build_real (rettype, frac));
9063 return NULL_TREE;
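/* Example of the modf folding above: modf (2.5, &ip) becomes the
   COMPOUND_EXPR (*ip = 2.0, 0.5), while modf (-3.0, &ip) becomes
   (*ip = -3.0, -0.0) because a negative, already integral argument
   leaves a fractional part of -0.0.  */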
9066 /* Given a location LOC, an interclass builtin function decl FNDECL
9067 and its single argument ARG, return a folded expression computing
9068 the same, or NULL_TREE if we either couldn't or didn't want to fold
9069 (the latter happens if there's an RTL instruction available). */
9071 static tree
9072 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9074 enum machine_mode mode;
9076 if (!validate_arg (arg, REAL_TYPE))
9077 return NULL_TREE;
9079 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9080 return NULL_TREE;
9082 mode = TYPE_MODE (TREE_TYPE (arg));
9084 /* If there is no optab, try generic code. */
9085 switch (DECL_FUNCTION_CODE (fndecl))
9087 tree result;
9089 CASE_FLT_FN (BUILT_IN_ISINF):
9091 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9092 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9093 tree const type = TREE_TYPE (arg);
9094 REAL_VALUE_TYPE r;
9095 char buf[128];
9097 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9098 real_from_string (&r, buf);
9099 result = build_call_expr (isgr_fn, 2,
9100 fold_build1_loc (loc, ABS_EXPR, type, arg),
9101 build_real (type, r));
9102 return result;
9104 CASE_FLT_FN (BUILT_IN_FINITE):
9105 case BUILT_IN_ISFINITE:
9107 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9108 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9109 tree const type = TREE_TYPE (arg);
9110 REAL_VALUE_TYPE r;
9111 char buf[128];
9113 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9114 real_from_string (&r, buf);
9115 result = build_call_expr (isle_fn, 2,
9116 fold_build1_loc (loc, ABS_EXPR, type, arg),
9117 build_real (type, r));
9118 /*result = fold_build2_loc (loc, UNGT_EXPR,
9119 TREE_TYPE (TREE_TYPE (fndecl)),
9120 fold_build1_loc (loc, ABS_EXPR, type, arg),
9121 build_real (type, r));
9122 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9123 TREE_TYPE (TREE_TYPE (fndecl)),
9124 result);*/
9125 return result;
9127 case BUILT_IN_ISNORMAL:
9129 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9130 islessequal(fabs(x),DBL_MAX). */
9131 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9132 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9133 tree const type = TREE_TYPE (arg);
9134 REAL_VALUE_TYPE rmax, rmin;
9135 char buf[128];
9137 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9138 real_from_string (&rmax, buf);
9139 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9140 real_from_string (&rmin, buf);
9141 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9142 result = build_call_expr (isle_fn, 2, arg,
9143 build_real (type, rmax));
9144 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9145 build_call_expr (isge_fn, 2, arg,
9146 build_real (type, rmin)));
9147 return result;
9149 default:
9150 break;
9153 return NULL_TREE;
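/* Illustrative sketch, not part of the compiler: when no target instruction
   is available, the generic expansions above amount to open-coding the
   classification against the extreme values of the type, e.g. for double:

     isinf (x)     =>  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  =>  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  =>  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)

   where DBL_MAX and DBL_MIN stand for the constants built from
   REAL_MODE_FORMAT via get_max_float and "0x1p<emin-1>".  The unordered
   comparison builtins are used so that a NaN argument yields false without
   raising a spurious invalid-operand exception.  */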
9156 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
9157 ARG is the argument for the call. */
9159 static tree
9160 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9163 REAL_VALUE_TYPE r;
9165 if (!validate_arg (arg, REAL_TYPE))
9166 return NULL_TREE;
9168 switch (builtin_index)
9170 case BUILT_IN_ISINF:
9171 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9172 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9174 if (TREE_CODE (arg) == REAL_CST)
9176 r = TREE_REAL_CST (arg);
9177 if (real_isinf (&r))
9178 return real_compare (GT_EXPR, &r, &dconst0)
9179 ? integer_one_node : integer_minus_one_node;
9180 else
9181 return integer_zero_node;
9184 return NULL_TREE;
9186 case BUILT_IN_ISINF_SIGN:
9188 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9189 /* In a boolean context, GCC will fold the inner COND_EXPR to
9190 1. So e.g. "if (isinf_sign(x))" would be folded to just
9191 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9192 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9193 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9194 tree tmp = NULL_TREE;
9196 arg = builtin_save_expr (arg);
9198 if (signbit_fn && isinf_fn)
9200 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9201 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9203 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9204 signbit_call, integer_zero_node);
9205 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9206 isinf_call, integer_zero_node);
9208 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9209 integer_minus_one_node, integer_one_node);
9210 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9211 isinf_call, tmp,
9212 integer_zero_node);
9215 return tmp;
9218 case BUILT_IN_ISFINITE:
9219 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9220 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9221 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9223 if (TREE_CODE (arg) == REAL_CST)
9225 r = TREE_REAL_CST (arg);
9226 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9229 return NULL_TREE;
9231 case BUILT_IN_ISNAN:
9232 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9233 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9235 if (TREE_CODE (arg) == REAL_CST)
9237 r = TREE_REAL_CST (arg);
9238 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9241 arg = builtin_save_expr (arg);
9242 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9244 default:
9245 gcc_unreachable ();
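/* Illustrative sketch, not part of the compiler: examples of what the
   classification folds above produce for (hypothetical) user code:

     __builtin_isinf (3.0)                =>  0    constant folded
     __builtin_isinf (-__builtin_inf ())  =>  -1   GNU extension: sign kept
     __builtin_isinf_sign (x)             =>  isinf (x) ? (signbit (x) ? -1 : 1) : 0
     __builtin_isnan (x)                  =>  UNORDERED_EXPR (x, x)
     __builtin_isnan (x), -ffinite-math-only
                                          =>  0    NaNs are not honored

   For non-constant isinf/isfinite arguments, fold_builtin_1 below falls
   back on fold_builtin_interclass_mathfn.  */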
9249 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9250 This builtin will generate code to return the appropriate floating
9251 point classification depending on the value of the floating point
9252 number passed in. The possible return values must be supplied as
9253 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9254 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9255 one floating point argument which is "type generic". */
9257 static tree
9258 fold_builtin_fpclassify (location_t loc, tree exp)
9260 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9261 arg, type, res, tmp;
9262 enum machine_mode mode;
9263 REAL_VALUE_TYPE r;
9264 char buf[128];
9266 /* Verify the required arguments in the original call. */
9267 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9268 INTEGER_TYPE, INTEGER_TYPE,
9269 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9270 return NULL_TREE;
9272 fp_nan = CALL_EXPR_ARG (exp, 0);
9273 fp_infinite = CALL_EXPR_ARG (exp, 1);
9274 fp_normal = CALL_EXPR_ARG (exp, 2);
9275 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9276 fp_zero = CALL_EXPR_ARG (exp, 4);
9277 arg = CALL_EXPR_ARG (exp, 5);
9278 type = TREE_TYPE (arg);
9279 mode = TYPE_MODE (type);
9280 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9282 /* fpclassify(x) ->
9283 isnan(x) ? FP_NAN :
9284 (fabs(x) == Inf ? FP_INFINITE :
9285 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9286 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9288 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9289 build_real (type, dconst0));
9290 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9291 tmp, fp_zero, fp_subnormal);
9293 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9294 real_from_string (&r, buf);
9295 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9296 arg, build_real (type, r));
9297 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9299 if (HONOR_INFINITIES (mode))
9301 real_inf (&r);
9302 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9303 build_real (type, r));
9304 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9305 fp_infinite, res);
9308 if (HONOR_NANS (mode))
9310 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9311 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9314 return res;
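/* Illustrative sketch, not part of the compiler: for a double argument the
   tree built above behaves like this hypothetical open-coded classifier,
   with 0x1p-1022 in the role of DBL_MIN:

     int classify (double x)
     {
       double ax = __builtin_fabs (x);
       if (__builtin_isnan (x))      return FP_NAN;        if NaNs honored
       if (ax == __builtin_inf ())   return FP_INFINITE;   if Infs honored
       if (ax >= 0x1p-1022)          return FP_NORMAL;
       return ax == 0.0 ? FP_ZERO : FP_SUBNORMAL;
     }

   Under -ffinite-math-only the NaN and infinity arms are dropped, which is
   what the HONOR_NANS and HONOR_INFINITIES guards above implement.  */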
9317 /* Fold a call to an unordered comparison function such as
9318 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9319 being called and ARG0 and ARG1 are the arguments for the call.
9320 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9321 the opposite of the desired result. UNORDERED_CODE is used
9322 for modes that can hold NaNs and ORDERED_CODE is used for
9323 the rest. */
9325 static tree
9326 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9327 enum tree_code unordered_code,
9328 enum tree_code ordered_code)
9330 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9331 enum tree_code code;
9332 tree type0, type1;
9333 enum tree_code code0, code1;
9334 tree cmp_type = NULL_TREE;
9336 type0 = TREE_TYPE (arg0);
9337 type1 = TREE_TYPE (arg1);
9339 code0 = TREE_CODE (type0);
9340 code1 = TREE_CODE (type1);
9342 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9343 /* Choose the wider of two real types. */
9344 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9345 ? type0 : type1;
9346 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9347 cmp_type = type0;
9348 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9349 cmp_type = type1;
9351 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9352 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9354 if (unordered_code == UNORDERED_EXPR)
9356 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9357 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9358 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9361 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9362 : ordered_code;
9363 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9364 fold_build2_loc (loc, code, type, arg0, arg1));
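/* Illustrative sketch, not part of the compiler: because the codes passed
   in are the opposite of the desired result, isgreater, for example, is
   folded to a negated unordered-or-less-or-equal comparison:

     __builtin_isgreater (x, y)    =>  !UNLE_EXPR (x, y)    NaNs honored
                                   =>  !LE_EXPR (x, y)      -ffinite-math-only
     __builtin_isunordered (x, y)  =>  UNORDERED_EXPR (x, y)
                                   =>  0                    -ffinite-math-only

   Mixed real/integer operands are first converted to the common comparison
   type chosen above (the real type, or the wider of two real types).  */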
9367 /* Fold a call to built-in function FNDECL with 0 arguments.
9368 IGNORE is true if the result of the function call is ignored. This
9369 function returns NULL_TREE if no simplification was possible. */
9371 static tree
9372 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9374 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9375 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9376 switch (fcode)
9378 CASE_FLT_FN (BUILT_IN_INF):
9379 case BUILT_IN_INFD32:
9380 case BUILT_IN_INFD64:
9381 case BUILT_IN_INFD128:
9382 return fold_builtin_inf (loc, type, true);
9384 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9385 return fold_builtin_inf (loc, type, false);
9387 case BUILT_IN_CLASSIFY_TYPE:
9388 return fold_builtin_classify_type (NULL_TREE);
9390 default:
9391 break;
9393 return NULL_TREE;
9396 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9397 IGNORE is true if the result of the function call is ignored. This
9398 function returns NULL_TREE if no simplification was possible. */
9400 static tree
9401 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9403 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9404 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9405 switch (fcode)
9407 case BUILT_IN_CONSTANT_P:
9409 tree val = fold_builtin_constant_p (arg0);
9411 /* Gimplification will pull the CALL_EXPR for the builtin out of
9412 an if condition. When not optimizing, we'll not CSE it back.
9413 To avoid regressions such as link errors, return false now.
9414 if (!val && !optimize)
9415 val = integer_zero_node;
9417 return val;
9420 case BUILT_IN_CLASSIFY_TYPE:
9421 return fold_builtin_classify_type (arg0);
9423 case BUILT_IN_STRLEN:
9424 return fold_builtin_strlen (loc, type, arg0);
9426 CASE_FLT_FN (BUILT_IN_FABS):
9427 return fold_builtin_fabs (loc, arg0, type);
9429 case BUILT_IN_ABS:
9430 case BUILT_IN_LABS:
9431 case BUILT_IN_LLABS:
9432 case BUILT_IN_IMAXABS:
9433 return fold_builtin_abs (loc, arg0, type);
9435 CASE_FLT_FN (BUILT_IN_CONJ):
9436 if (validate_arg (arg0, COMPLEX_TYPE)
9437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9438 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9439 break;
9441 CASE_FLT_FN (BUILT_IN_CREAL):
9442 if (validate_arg (arg0, COMPLEX_TYPE)
9443 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9444 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9445 break;
9447 CASE_FLT_FN (BUILT_IN_CIMAG):
9448 if (validate_arg (arg0, COMPLEX_TYPE)
9449 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9450 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9451 break;
9453 CASE_FLT_FN (BUILT_IN_CCOS):
9454 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9456 CASE_FLT_FN (BUILT_IN_CCOSH):
9457 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9459 CASE_FLT_FN (BUILT_IN_CPROJ):
9460 return fold_builtin_cproj (loc, arg0, type);
9462 CASE_FLT_FN (BUILT_IN_CSIN):
9463 if (validate_arg (arg0, COMPLEX_TYPE)
9464 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9465 return do_mpc_arg1 (arg0, type, mpc_sin);
9466 break;
9468 CASE_FLT_FN (BUILT_IN_CSINH):
9469 if (validate_arg (arg0, COMPLEX_TYPE)
9470 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9471 return do_mpc_arg1 (arg0, type, mpc_sinh);
9472 break;
9474 CASE_FLT_FN (BUILT_IN_CTAN):
9475 if (validate_arg (arg0, COMPLEX_TYPE)
9476 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9477 return do_mpc_arg1 (arg0, type, mpc_tan);
9478 break;
9480 CASE_FLT_FN (BUILT_IN_CTANH):
9481 if (validate_arg (arg0, COMPLEX_TYPE)
9482 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9483 return do_mpc_arg1 (arg0, type, mpc_tanh);
9484 break;
9486 CASE_FLT_FN (BUILT_IN_CLOG):
9487 if (validate_arg (arg0, COMPLEX_TYPE)
9488 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9489 return do_mpc_arg1 (arg0, type, mpc_log);
9490 break;
9492 CASE_FLT_FN (BUILT_IN_CSQRT):
9493 if (validate_arg (arg0, COMPLEX_TYPE)
9494 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9495 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9496 break;
9498 CASE_FLT_FN (BUILT_IN_CASIN):
9499 if (validate_arg (arg0, COMPLEX_TYPE)
9500 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9501 return do_mpc_arg1 (arg0, type, mpc_asin);
9502 break;
9504 CASE_FLT_FN (BUILT_IN_CACOS):
9505 if (validate_arg (arg0, COMPLEX_TYPE)
9506 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9507 return do_mpc_arg1 (arg0, type, mpc_acos);
9508 break;
9510 CASE_FLT_FN (BUILT_IN_CATAN):
9511 if (validate_arg (arg0, COMPLEX_TYPE)
9512 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9513 return do_mpc_arg1 (arg0, type, mpc_atan);
9514 break;
9516 CASE_FLT_FN (BUILT_IN_CASINH):
9517 if (validate_arg (arg0, COMPLEX_TYPE)
9518 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9519 return do_mpc_arg1 (arg0, type, mpc_asinh);
9520 break;
9522 CASE_FLT_FN (BUILT_IN_CACOSH):
9523 if (validate_arg (arg0, COMPLEX_TYPE)
9524 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9525 return do_mpc_arg1 (arg0, type, mpc_acosh);
9526 break;
9528 CASE_FLT_FN (BUILT_IN_CATANH):
9529 if (validate_arg (arg0, COMPLEX_TYPE)
9530 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9531 return do_mpc_arg1 (arg0, type, mpc_atanh);
9532 break;
9534 CASE_FLT_FN (BUILT_IN_CABS):
9535 return fold_builtin_cabs (loc, arg0, type, fndecl);
9537 CASE_FLT_FN (BUILT_IN_CARG):
9538 return fold_builtin_carg (loc, arg0, type);
9540 CASE_FLT_FN (BUILT_IN_SQRT):
9541 return fold_builtin_sqrt (loc, arg0, type);
9543 CASE_FLT_FN (BUILT_IN_CBRT):
9544 return fold_builtin_cbrt (loc, arg0, type);
9546 CASE_FLT_FN (BUILT_IN_ASIN):
9547 if (validate_arg (arg0, REAL_TYPE))
9548 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9549 &dconstm1, &dconst1, true);
9550 break;
9552 CASE_FLT_FN (BUILT_IN_ACOS):
9553 if (validate_arg (arg0, REAL_TYPE))
9554 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9555 &dconstm1, &dconst1, true);
9556 break;
9558 CASE_FLT_FN (BUILT_IN_ATAN):
9559 if (validate_arg (arg0, REAL_TYPE))
9560 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9561 break;
9563 CASE_FLT_FN (BUILT_IN_ASINH):
9564 if (validate_arg (arg0, REAL_TYPE))
9565 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9566 break;
9568 CASE_FLT_FN (BUILT_IN_ACOSH):
9569 if (validate_arg (arg0, REAL_TYPE))
9570 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9571 &dconst1, NULL, true);
9572 break;
9574 CASE_FLT_FN (BUILT_IN_ATANH):
9575 if (validate_arg (arg0, REAL_TYPE))
9576 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9577 &dconstm1, &dconst1, false);
9578 break;
9580 CASE_FLT_FN (BUILT_IN_SIN):
9581 if (validate_arg (arg0, REAL_TYPE))
9582 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9583 break;
9585 CASE_FLT_FN (BUILT_IN_COS):
9586 return fold_builtin_cos (loc, arg0, type, fndecl);
9588 CASE_FLT_FN (BUILT_IN_TAN):
9589 return fold_builtin_tan (arg0, type);
9591 CASE_FLT_FN (BUILT_IN_CEXP):
9592 return fold_builtin_cexp (loc, arg0, type);
9594 CASE_FLT_FN (BUILT_IN_CEXPI):
9595 if (validate_arg (arg0, REAL_TYPE))
9596 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9597 break;
9599 CASE_FLT_FN (BUILT_IN_SINH):
9600 if (validate_arg (arg0, REAL_TYPE))
9601 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9602 break;
9604 CASE_FLT_FN (BUILT_IN_COSH):
9605 return fold_builtin_cosh (loc, arg0, type, fndecl);
9607 CASE_FLT_FN (BUILT_IN_TANH):
9608 if (validate_arg (arg0, REAL_TYPE))
9609 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9610 break;
9612 CASE_FLT_FN (BUILT_IN_ERF):
9613 if (validate_arg (arg0, REAL_TYPE))
9614 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9615 break;
9617 CASE_FLT_FN (BUILT_IN_ERFC):
9618 if (validate_arg (arg0, REAL_TYPE))
9619 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9620 break;
9622 CASE_FLT_FN (BUILT_IN_TGAMMA):
9623 if (validate_arg (arg0, REAL_TYPE))
9624 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9625 break;
9627 CASE_FLT_FN (BUILT_IN_EXP):
9628 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9630 CASE_FLT_FN (BUILT_IN_EXP2):
9631 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9633 CASE_FLT_FN (BUILT_IN_EXP10):
9634 CASE_FLT_FN (BUILT_IN_POW10):
9635 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9637 CASE_FLT_FN (BUILT_IN_EXPM1):
9638 if (validate_arg (arg0, REAL_TYPE))
9639 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9640 break;
9642 CASE_FLT_FN (BUILT_IN_LOG):
9643 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9645 CASE_FLT_FN (BUILT_IN_LOG2):
9646 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9648 CASE_FLT_FN (BUILT_IN_LOG10):
9649 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9651 CASE_FLT_FN (BUILT_IN_LOG1P):
9652 if (validate_arg (arg0, REAL_TYPE))
9653 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9654 &dconstm1, NULL, false);
9655 break;
9657 CASE_FLT_FN (BUILT_IN_J0):
9658 if (validate_arg (arg0, REAL_TYPE))
9659 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9660 NULL, NULL, 0);
9661 break;
9663 CASE_FLT_FN (BUILT_IN_J1):
9664 if (validate_arg (arg0, REAL_TYPE))
9665 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9666 NULL, NULL, 0);
9667 break;
9669 CASE_FLT_FN (BUILT_IN_Y0):
9670 if (validate_arg (arg0, REAL_TYPE))
9671 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9672 &dconst0, NULL, false);
9673 break;
9675 CASE_FLT_FN (BUILT_IN_Y1):
9676 if (validate_arg (arg0, REAL_TYPE))
9677 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9678 &dconst0, NULL, false);
9679 break;
9681 CASE_FLT_FN (BUILT_IN_NAN):
9682 case BUILT_IN_NAND32:
9683 case BUILT_IN_NAND64:
9684 case BUILT_IN_NAND128:
9685 return fold_builtin_nan (arg0, type, true);
9687 CASE_FLT_FN (BUILT_IN_NANS):
9688 return fold_builtin_nan (arg0, type, false);
9690 CASE_FLT_FN (BUILT_IN_FLOOR):
9691 return fold_builtin_floor (loc, fndecl, arg0);
9693 CASE_FLT_FN (BUILT_IN_CEIL):
9694 return fold_builtin_ceil (loc, fndecl, arg0);
9696 CASE_FLT_FN (BUILT_IN_TRUNC):
9697 return fold_builtin_trunc (loc, fndecl, arg0);
9699 CASE_FLT_FN (BUILT_IN_ROUND):
9700 return fold_builtin_round (loc, fndecl, arg0);
9702 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9703 CASE_FLT_FN (BUILT_IN_RINT):
9704 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9706 CASE_FLT_FN (BUILT_IN_LCEIL):
9707 CASE_FLT_FN (BUILT_IN_LLCEIL):
9708 CASE_FLT_FN (BUILT_IN_LFLOOR):
9709 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9710 CASE_FLT_FN (BUILT_IN_LROUND):
9711 CASE_FLT_FN (BUILT_IN_LLROUND):
9712 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9714 CASE_FLT_FN (BUILT_IN_LRINT):
9715 CASE_FLT_FN (BUILT_IN_LLRINT):
9716 return fold_fixed_mathfn (loc, fndecl, arg0);
9718 case BUILT_IN_BSWAP32:
9719 case BUILT_IN_BSWAP64:
9720 return fold_builtin_bswap (fndecl, arg0);
9722 CASE_INT_FN (BUILT_IN_FFS):
9723 CASE_INT_FN (BUILT_IN_CLZ):
9724 CASE_INT_FN (BUILT_IN_CTZ):
9725 CASE_INT_FN (BUILT_IN_POPCOUNT):
9726 CASE_INT_FN (BUILT_IN_PARITY):
9727 return fold_builtin_bitop (fndecl, arg0);
9729 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9730 return fold_builtin_signbit (loc, arg0, type);
9732 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9733 return fold_builtin_significand (loc, arg0, type);
9735 CASE_FLT_FN (BUILT_IN_ILOGB):
9736 CASE_FLT_FN (BUILT_IN_LOGB):
9737 return fold_builtin_logb (loc, arg0, type);
9739 case BUILT_IN_ISASCII:
9740 return fold_builtin_isascii (loc, arg0);
9742 case BUILT_IN_TOASCII:
9743 return fold_builtin_toascii (loc, arg0);
9745 case BUILT_IN_ISDIGIT:
9746 return fold_builtin_isdigit (loc, arg0);
9748 CASE_FLT_FN (BUILT_IN_FINITE):
9749 case BUILT_IN_FINITED32:
9750 case BUILT_IN_FINITED64:
9751 case BUILT_IN_FINITED128:
9752 case BUILT_IN_ISFINITE:
9754 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9755 if (ret)
9756 return ret;
9757 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9760 CASE_FLT_FN (BUILT_IN_ISINF):
9761 case BUILT_IN_ISINFD32:
9762 case BUILT_IN_ISINFD64:
9763 case BUILT_IN_ISINFD128:
9765 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9766 if (ret)
9767 return ret;
9768 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9771 case BUILT_IN_ISNORMAL:
9772 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9774 case BUILT_IN_ISINF_SIGN:
9775 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9777 CASE_FLT_FN (BUILT_IN_ISNAN):
9778 case BUILT_IN_ISNAND32:
9779 case BUILT_IN_ISNAND64:
9780 case BUILT_IN_ISNAND128:
9781 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9783 case BUILT_IN_PRINTF:
9784 case BUILT_IN_PRINTF_UNLOCKED:
9785 case BUILT_IN_VPRINTF:
9786 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9788 case BUILT_IN_FREE:
9789 if (integer_zerop (arg0))
9790 return build_empty_stmt (loc);
9791 break;
9793 default:
9794 break;
9797 return NULL_TREE;
9801 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9802 IGNORE is true if the result of the function call is ignored. This
9803 function returns NULL_TREE if no simplification was possible. */
9805 static tree
9806 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9808 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9809 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9811 switch (fcode)
9813 CASE_FLT_FN (BUILT_IN_JN):
9814 if (validate_arg (arg0, INTEGER_TYPE)
9815 && validate_arg (arg1, REAL_TYPE))
9816 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9817 break;
9819 CASE_FLT_FN (BUILT_IN_YN):
9820 if (validate_arg (arg0, INTEGER_TYPE)
9821 && validate_arg (arg1, REAL_TYPE))
9822 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9823 &dconst0, false);
9824 break;
9826 CASE_FLT_FN (BUILT_IN_DREM):
9827 CASE_FLT_FN (BUILT_IN_REMAINDER):
9828 if (validate_arg (arg0, REAL_TYPE)
9829 && validate_arg (arg1, REAL_TYPE))
9830 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9831 break;
9833 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9834 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9835 if (validate_arg (arg0, REAL_TYPE)
9836 && validate_arg (arg1, POINTER_TYPE))
9837 return do_mpfr_lgamma_r (arg0, arg1, type);
9838 break;
9840 CASE_FLT_FN (BUILT_IN_ATAN2):
9841 if (validate_arg (arg0, REAL_TYPE)
9842 && validate_arg (arg1, REAL_TYPE))
9843 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9844 break;
9846 CASE_FLT_FN (BUILT_IN_FDIM):
9847 if (validate_arg (arg0, REAL_TYPE)
9848 && validate_arg (arg1, REAL_TYPE))
9849 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9850 break;
9852 CASE_FLT_FN (BUILT_IN_HYPOT):
9853 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9855 CASE_FLT_FN (BUILT_IN_CPOW):
9856 if (validate_arg (arg0, COMPLEX_TYPE)
9857 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9858 && validate_arg (arg1, COMPLEX_TYPE)
9859 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9860 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9861 break;
9863 CASE_FLT_FN (BUILT_IN_LDEXP):
9864 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9865 CASE_FLT_FN (BUILT_IN_SCALBN):
9866 CASE_FLT_FN (BUILT_IN_SCALBLN):
9867 return fold_builtin_load_exponent (loc, arg0, arg1,
9868 type, /*ldexp=*/false);
9870 CASE_FLT_FN (BUILT_IN_FREXP):
9871 return fold_builtin_frexp (loc, arg0, arg1, type);
9873 CASE_FLT_FN (BUILT_IN_MODF):
9874 return fold_builtin_modf (loc, arg0, arg1, type);
9876 case BUILT_IN_BZERO:
9877 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9879 case BUILT_IN_FPUTS:
9880 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9882 case BUILT_IN_FPUTS_UNLOCKED:
9883 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9885 case BUILT_IN_STRSTR:
9886 return fold_builtin_strstr (loc, arg0, arg1, type);
9888 case BUILT_IN_STRCAT:
9889 return fold_builtin_strcat (loc, arg0, arg1);
9891 case BUILT_IN_STRSPN:
9892 return fold_builtin_strspn (loc, arg0, arg1);
9894 case BUILT_IN_STRCSPN:
9895 return fold_builtin_strcspn (loc, arg0, arg1);
9897 case BUILT_IN_STRCHR:
9898 case BUILT_IN_INDEX:
9899 return fold_builtin_strchr (loc, arg0, arg1, type);
9901 case BUILT_IN_STRRCHR:
9902 case BUILT_IN_RINDEX:
9903 return fold_builtin_strrchr (loc, arg0, arg1, type);
9905 case BUILT_IN_STRCPY:
9906 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
9908 case BUILT_IN_STPCPY:
9909 if (ignore)
9911 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
9912 if (!fn)
9913 break;
9915 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
9917 else
9918 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
9919 break;
9921 case BUILT_IN_STRCMP:
9922 return fold_builtin_strcmp (loc, arg0, arg1);
9924 case BUILT_IN_STRPBRK:
9925 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9927 case BUILT_IN_EXPECT:
9928 return fold_builtin_expect (loc, arg0, arg1);
9930 CASE_FLT_FN (BUILT_IN_POW):
9931 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
9933 CASE_FLT_FN (BUILT_IN_POWI):
9934 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
9936 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9937 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
9939 CASE_FLT_FN (BUILT_IN_FMIN):
9940 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
9942 CASE_FLT_FN (BUILT_IN_FMAX):
9943 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
9945 case BUILT_IN_ISGREATER:
9946 return fold_builtin_unordered_cmp (loc, fndecl,
9947 arg0, arg1, UNLE_EXPR, LE_EXPR);
9948 case BUILT_IN_ISGREATEREQUAL:
9949 return fold_builtin_unordered_cmp (loc, fndecl,
9950 arg0, arg1, UNLT_EXPR, LT_EXPR);
9951 case BUILT_IN_ISLESS:
9952 return fold_builtin_unordered_cmp (loc, fndecl,
9953 arg0, arg1, UNGE_EXPR, GE_EXPR);
9954 case BUILT_IN_ISLESSEQUAL:
9955 return fold_builtin_unordered_cmp (loc, fndecl,
9956 arg0, arg1, UNGT_EXPR, GT_EXPR);
9957 case BUILT_IN_ISLESSGREATER:
9958 return fold_builtin_unordered_cmp (loc, fndecl,
9959 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9960 case BUILT_IN_ISUNORDERED:
9961 return fold_builtin_unordered_cmp (loc, fndecl,
9962 arg0, arg1, UNORDERED_EXPR,
9963 NOP_EXPR);
9965 /* We do the folding for va_start in the expander. */
9966 case BUILT_IN_VA_START:
9967 break;
9969 case BUILT_IN_SPRINTF:
9970 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
9972 case BUILT_IN_OBJECT_SIZE:
9973 return fold_builtin_object_size (arg0, arg1);
9975 case BUILT_IN_PRINTF:
9976 case BUILT_IN_PRINTF_UNLOCKED:
9977 case BUILT_IN_VPRINTF:
9978 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
9980 case BUILT_IN_PRINTF_CHK:
9981 case BUILT_IN_VPRINTF_CHK:
9982 if (!validate_arg (arg0, INTEGER_TYPE)
9983 || TREE_SIDE_EFFECTS (arg0))
9984 return NULL_TREE;
9985 else
9986 return fold_builtin_printf (loc, fndecl,
9987 arg1, NULL_TREE, ignore, fcode);
9988 break;
9990 case BUILT_IN_FPRINTF:
9991 case BUILT_IN_FPRINTF_UNLOCKED:
9992 case BUILT_IN_VFPRINTF:
9993 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
9994 ignore, fcode);
9996 default:
9997 break;
9999 return NULL_TREE;
10002 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10003 and ARG2. IGNORE is true if the result of the function call is ignored.
10004 This function returns NULL_TREE if no simplification was possible. */
10006 static tree
10007 fold_builtin_3 (location_t loc, tree fndecl,
10008 tree arg0, tree arg1, tree arg2, bool ignore)
10010 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10011 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10012 switch (fcode)
10015 CASE_FLT_FN (BUILT_IN_SINCOS):
10016 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10018 CASE_FLT_FN (BUILT_IN_FMA):
10019 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10020 break;
10022 CASE_FLT_FN (BUILT_IN_REMQUO):
10023 if (validate_arg (arg0, REAL_TYPE)
10024 && validate_arg (arg1, REAL_TYPE)
10025 && validate_arg (arg2, POINTER_TYPE))
10026 return do_mpfr_remquo (arg0, arg1, arg2);
10027 break;
10029 case BUILT_IN_MEMSET:
10030 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10032 case BUILT_IN_BCOPY:
10033 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10034 void_type_node, true, /*endp=*/3);
10036 case BUILT_IN_MEMCPY:
10037 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10038 type, ignore, /*endp=*/0);
10040 case BUILT_IN_MEMPCPY:
10041 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10042 type, ignore, /*endp=*/1);
10044 case BUILT_IN_MEMMOVE:
10045 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10046 type, ignore, /*endp=*/3);
10048 case BUILT_IN_STRNCAT:
10049 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10051 case BUILT_IN_STRNCPY:
10052 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10054 case BUILT_IN_STRNCMP:
10055 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10057 case BUILT_IN_MEMCHR:
10058 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10060 case BUILT_IN_BCMP:
10061 case BUILT_IN_MEMCMP:
10062 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10064 case BUILT_IN_SPRINTF:
10065 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10067 case BUILT_IN_SNPRINTF:
10068 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10070 case BUILT_IN_STRCPY_CHK:
10071 case BUILT_IN_STPCPY_CHK:
10072 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10073 ignore, fcode);
10075 case BUILT_IN_STRCAT_CHK:
10076 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10078 case BUILT_IN_PRINTF_CHK:
10079 case BUILT_IN_VPRINTF_CHK:
10080 if (!validate_arg (arg0, INTEGER_TYPE)
10081 || TREE_SIDE_EFFECTS (arg0))
10082 return NULL_TREE;
10083 else
10084 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10085 break;
10087 case BUILT_IN_FPRINTF:
10088 case BUILT_IN_FPRINTF_UNLOCKED:
10089 case BUILT_IN_VFPRINTF:
10090 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10091 ignore, fcode);
10093 case BUILT_IN_FPRINTF_CHK:
10094 case BUILT_IN_VFPRINTF_CHK:
10095 if (!validate_arg (arg1, INTEGER_TYPE)
10096 || TREE_SIDE_EFFECTS (arg1))
10097 return NULL_TREE;
10098 else
10099 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10100 ignore, fcode);
10102 default:
10103 break;
10105 return NULL_TREE;
10108 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10109 ARG2, and ARG3. IGNORE is true if the result of the function call is
10110 ignored. This function returns NULL_TREE if no simplification was
10111 possible. */
10113 static tree
10114 fold_builtin_4 (location_t loc, tree fndecl,
10115 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10117 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10119 switch (fcode)
10121 case BUILT_IN_MEMCPY_CHK:
10122 case BUILT_IN_MEMPCPY_CHK:
10123 case BUILT_IN_MEMMOVE_CHK:
10124 case BUILT_IN_MEMSET_CHK:
10125 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10126 NULL_TREE, ignore,
10127 DECL_FUNCTION_CODE (fndecl));
10129 case BUILT_IN_STRNCPY_CHK:
10130 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10132 case BUILT_IN_STRNCAT_CHK:
10133 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10135 case BUILT_IN_SNPRINTF:
10136 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10138 case BUILT_IN_FPRINTF_CHK:
10139 case BUILT_IN_VFPRINTF_CHK:
10140 if (!validate_arg (arg1, INTEGER_TYPE)
10141 || TREE_SIDE_EFFECTS (arg1))
10142 return NULL_TREE;
10143 else
10144 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10145 ignore, fcode);
10146 break;
10148 default:
10149 break;
10151 return NULL_TREE;
10154 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10155 arguments, where NARGS <= 4. IGNORE is true if the result of the
10156 function call is ignored. This function returns NULL_TREE if no
10157 simplification was possible. Note that this only folds builtins with
10158 fixed argument patterns. Foldings that do varargs-to-varargs
10159 transformations, or that match calls with more than 4 arguments,
10160 need to be handled with fold_builtin_varargs instead. */
10162 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10164 static tree
10165 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10167 tree ret = NULL_TREE;
10169 switch (nargs)
10171 case 0:
10172 ret = fold_builtin_0 (loc, fndecl, ignore);
10173 break;
10174 case 1:
10175 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10176 break;
10177 case 2:
10178 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10179 break;
10180 case 3:
10181 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10182 break;
10183 case 4:
10184 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10185 ignore);
10186 break;
10187 default:
10188 break;
10190 if (ret)
10192 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10193 SET_EXPR_LOCATION (ret, loc);
10194 TREE_NO_WARNING (ret) = 1;
10195 return ret;
10197 return NULL_TREE;
10200 /* Builtins with folding operations that operate on "..." arguments
10201 need special handling; we need to store the arguments in a convenient
10202 data structure before attempting any folding. Fortunately there are
10203 only a few builtins that fall into this category. FNDECL is the
10204 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10205 result of the function call is ignored. */
10207 static tree
10208 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10209 bool ignore ATTRIBUTE_UNUSED)
10211 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10212 tree ret = NULL_TREE;
10214 switch (fcode)
10216 case BUILT_IN_SPRINTF_CHK:
10217 case BUILT_IN_VSPRINTF_CHK:
10218 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10219 break;
10221 case BUILT_IN_SNPRINTF_CHK:
10222 case BUILT_IN_VSNPRINTF_CHK:
10223 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10224 break;
10226 case BUILT_IN_FPCLASSIFY:
10227 ret = fold_builtin_fpclassify (loc, exp);
10228 break;
10230 default:
10231 break;
10233 if (ret)
10235 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10236 SET_EXPR_LOCATION (ret, loc);
10237 TREE_NO_WARNING (ret) = 1;
10238 return ret;
10240 return NULL_TREE;
10243 /* Return true if FNDECL shouldn't be folded right now.
10244 If a built-in function has an inline attribute always_inline
10245 wrapper, defer folding it until after always_inline functions have
10246 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10247 might not be performed. */
10249 static bool
10250 avoid_folding_inline_builtin (tree fndecl)
10252 return (DECL_DECLARED_INLINE_P (fndecl)
10253 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10254 && cfun
10255 && !cfun->always_inline_functions_inlined
10256 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10259 /* A wrapper function for builtin folding that prevents warnings for
10260 "statement without effect" and the like, caused by removing the
10261 call node earlier than the warning is generated. */
10263 tree
10264 fold_call_expr (location_t loc, tree exp, bool ignore)
10266 tree ret = NULL_TREE;
10267 tree fndecl = get_callee_fndecl (exp);
10268 if (fndecl
10269 && TREE_CODE (fndecl) == FUNCTION_DECL
10270 && DECL_BUILT_IN (fndecl)
10271 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10272 yet. Defer folding until we see all the arguments
10273 (after inlining). */
10274 && !CALL_EXPR_VA_ARG_PACK (exp))
10276 int nargs = call_expr_nargs (exp);
10278 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10279 instead the last argument is __builtin_va_arg_pack (). Defer folding
10280 even in that case, until arguments are finalized. */
10281 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10283 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10284 if (fndecl2
10285 && TREE_CODE (fndecl2) == FUNCTION_DECL
10286 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10287 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10288 return NULL_TREE;
10291 if (avoid_folding_inline_builtin (fndecl))
10292 return NULL_TREE;
10294 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10295 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10296 CALL_EXPR_ARGP (exp), ignore);
10297 else
10299 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10301 tree *args = CALL_EXPR_ARGP (exp);
10302 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10304 if (!ret)
10305 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10306 if (ret)
10307 return ret;
10310 return NULL_TREE;
10313 /* Conveniently construct a function call expression. FNDECL names the
10314 function to be called and N arguments are passed in the array
10315 ARGARRAY. */
10317 tree
10318 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10320 tree fntype = TREE_TYPE (fndecl);
10321 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10323 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10326 /* Conveniently construct a function call expression. FNDECL names the
10327 function to be called and the arguments are passed in the vector
10328 VEC. */
10330 tree
10331 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10333 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10334 VEC_address (tree, vec));
10338 /* Conveniently construct a function call expression. FNDECL names the
10339 function to be called, N is the number of arguments, and the "..."
10340 parameters are the argument expressions. */
10342 tree
10343 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10345 va_list ap;
10346 tree *argarray = XALLOCAVEC (tree, n);
10347 int i;
10349 va_start (ap, n);
10350 for (i = 0; i < n; i++)
10351 argarray[i] = va_arg (ap, tree);
10352 va_end (ap);
10353 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10356 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10357 varargs macros aren't supported by all bootstrap compilers. */
10359 tree
10360 build_call_expr (tree fndecl, int n, ...)
10362 va_list ap;
10363 tree *argarray = XALLOCAVEC (tree, n);
10364 int i;
10366 va_start (ap, n);
10367 for (i = 0; i < n; i++)
10368 argarray[i] = va_arg (ap, tree);
10369 va_end (ap);
10370 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10373 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10374 N arguments are passed in the array ARGARRAY. */
10376 tree
10377 fold_builtin_call_array (location_t loc, tree type,
10378 tree fn,
10379 int n,
10380 tree *argarray)
10382 tree ret = NULL_TREE;
10383 tree exp;
10385 if (TREE_CODE (fn) == ADDR_EXPR)
10387 tree fndecl = TREE_OPERAND (fn, 0);
10388 if (TREE_CODE (fndecl) == FUNCTION_DECL
10389 && DECL_BUILT_IN (fndecl))
10391 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10392 function are not finalized yet. Defer folding until they are. */
10393 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10395 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10396 if (fndecl2
10397 && TREE_CODE (fndecl2) == FUNCTION_DECL
10398 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10399 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10400 return build_call_array_loc (loc, type, fn, n, argarray);
10402 if (avoid_folding_inline_builtin (fndecl))
10403 return build_call_array_loc (loc, type, fn, n, argarray);
10404 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10406 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10407 if (ret)
10408 return ret;
10410 return build_call_array_loc (loc, type, fn, n, argarray);
10412 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10414 /* First try the transformations that don't require consing up
10415 an exp. */
10416 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10417 if (ret)
10418 return ret;
10421 /* If we got this far, we need to build an exp. */
10422 exp = build_call_array_loc (loc, type, fn, n, argarray);
10423 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10424 return ret ? ret : exp;
10428 return build_call_array_loc (loc, type, fn, n, argarray);
10431 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10432 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10433 of arguments in ARGS to be omitted. OLDNARGS is the number of
10434 elements in ARGS. */
10436 static tree
10437 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10438 int skip, tree fndecl, int n, va_list newargs)
10440 int nargs = oldnargs - skip + n;
10441 tree *buffer;
10443 if (n > 0)
10445 int i, j;
10447 buffer = XALLOCAVEC (tree, nargs);
10448 for (i = 0; i < n; i++)
10449 buffer[i] = va_arg (newargs, tree);
10450 for (j = skip; j < oldnargs; j++, i++)
10451 buffer[i] = args[j];
10453 else
10454 buffer = args + skip;
10456 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
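/* Illustrative sketch, not part of the compiler: with OLDNARGS == 3,
   SKIP == 1 and N == 2 the buffer assembled above is

     { newargs[0], newargs[1], args[1], args[2] }

   i.e. the first SKIP old arguments are dropped and the N new ones are
   prepended.  Callers such as the __*_chk folders use this to rebuild a
   call to a simpler function with the checking arguments stripped.  */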
10459 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10460 list ARGS along with N new arguments specified as the "..."
10461 parameters. SKIP is the number of arguments in ARGS to be omitted.
10462 OLDNARGS is the number of elements in ARGS. */
10464 static tree
10465 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10466 int skip, tree fndecl, int n, ...)
10468 va_list ap;
10469 tree t;
10471 va_start (ap, n);
10472 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10473 va_end (ap);
10475 return t;
10478 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10479 along with N new arguments specified as the "..." parameters. SKIP
10480 is the number of arguments in EXP to be omitted. This function is used
10481 to do varargs-to-varargs transformations. */
10483 static tree
10484 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10486 va_list ap;
10487 tree t;
10489 va_start (ap, n);
10490 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10491 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10492 va_end (ap);
10494 return t;
10497 /* Validate a single argument ARG against a tree code CODE representing
10498 a type. */
10500 static bool
10501 validate_arg (const_tree arg, enum tree_code code)
10503 if (!arg)
10504 return false;
10505 else if (code == POINTER_TYPE)
10506 return POINTER_TYPE_P (TREE_TYPE (arg));
10507 else if (code == INTEGER_TYPE)
10508 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10509 return code == TREE_CODE (TREE_TYPE (arg));
10512 /* This function validates the types of a function call argument list
10513 against a specified list of tree_codes. If the last specifier is a 0,
10514 that represents an ellipsis; otherwise the last specifier must be a
10515 VOID_TYPE.
10517 This is the GIMPLE version of validate_arglist. Eventually we want to
10518 completely convert builtins.c to work from GIMPLEs and the tree based
10519 validate_arglist will then be removed. */
10521 bool
10522 validate_gimple_arglist (const_gimple call, ...)
10524 enum tree_code code;
10525 bool res = 0;
10526 va_list ap;
10527 const_tree arg;
10528 size_t i;
10530 va_start (ap, call);
10531 i = 0;
10535 code = (enum tree_code) va_arg (ap, int);
10536 switch (code)
10538 case 0:
10539 /* This signifies an ellipsis; any further arguments are all ok. */
10540 res = true;
10541 goto end;
10542 case VOID_TYPE:
10543 /* This signifies an endlink; if no arguments remain, return
10544 true, otherwise return false. */
10545 res = (i == gimple_call_num_args (call));
10546 goto end;
10547 default:
10548 /* If no parameters remain or the parameter's code does not
10549 match the specified code, return false. Otherwise continue
10550 checking any remaining arguments. */
10551 arg = gimple_call_arg (call, i++);
10552 if (!validate_arg (arg, code))
10553 goto end;
10554 break;
10557 while (1);
10559 /* We need gotos here since we can only have one VA_CLOSE in a
10560 function. */
10561 end: ;
10562 va_end (ap);
10564 return res;
10567 /* This function validates the types of a function call argument list
10568 against a specified list of tree_codes. If the last specifier is a 0,
10569 that represents an ellipsis; otherwise the last specifier must be a
10570 VOID_TYPE. */
10572 bool
10573 validate_arglist (const_tree callexpr, ...)
10575 enum tree_code code;
10576 bool res = 0;
10577 va_list ap;
10578 const_call_expr_arg_iterator iter;
10579 const_tree arg;
10581 va_start (ap, callexpr);
10582 init_const_call_expr_arg_iterator (callexpr, &iter);
10586 code = (enum tree_code) va_arg (ap, int);
10587 switch (code)
10589 case 0:
10590 /* This signifies an ellipsis; any further arguments are all ok. */
10591 res = true;
10592 goto end;
10593 case VOID_TYPE:
10594 /* This signifies an endlink; if no arguments remain, return
10595 true, otherwise return false. */
10596 res = !more_const_call_expr_args_p (&iter);
10597 goto end;
10598 default:
10599 /* If no parameters remain or the parameter's code does not
10600 match the specified code, return false. Otherwise continue
10601 checking any remaining arguments. */
10602 arg = next_const_call_expr_arg (&iter);
10603 if (!validate_arg (arg, code))
10604 goto end;
10605 break;
10608 while (1);
10610 /* We need gotos here since we can only have one VA_CLOSE in a
10611 function. */
10612 end: ;
10613 va_end (ap);
10615 return res;
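/* Illustrative sketch, not part of the compiler: a typical caller checking
   for the signature "int memcmp (const void *, const void *, size_t)" does

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                            INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;

   while fold_builtin_fpclassify above ends its list with REAL_TYPE,
   VOID_TYPE to require exactly one trailing floating point argument.
   Ending the list with 0 instead of VOID_TYPE accepts any extra
   arguments.  */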
10618 /* Default target-specific builtin expander that does nothing. */
10621 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10622 rtx target ATTRIBUTE_UNUSED,
10623 rtx subtarget ATTRIBUTE_UNUSED,
10624 enum machine_mode mode ATTRIBUTE_UNUSED,
10625 int ignore ATTRIBUTE_UNUSED)
10627 return NULL_RTX;
10630 /* Returns true if EXP represents data that would potentially reside
10631 in a readonly section. */
10633 static bool
10634 readonly_data_expr (tree exp)
10636 STRIP_NOPS (exp);
10638 if (TREE_CODE (exp) != ADDR_EXPR)
10639 return false;
10641 exp = get_base_address (TREE_OPERAND (exp, 0));
10642 if (!exp)
10643 return false;
10645 /* Make sure we call decl_readonly_section only for trees it
10646 can handle (since it returns true for everything it doesn't
10647 understand). */
10648 if (TREE_CODE (exp) == STRING_CST
10649 || TREE_CODE (exp) == CONSTRUCTOR
10650 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10651 return decl_readonly_section (exp, 0);
10652 else
10653 return false;
10656 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10657 to the call, and TYPE is its return type.
10659 Return NULL_TREE if no simplification was possible, otherwise return the
10660 simplified form of the call as a tree.
10662 The simplified form may be a constant or other expression which
10663 computes the same value, but in a more efficient manner (including
10664 calls to other builtin functions).
10666 The call may contain arguments which need to be evaluated, but
10667 which are not useful to determine the result of the call. In
10668 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10669 COMPOUND_EXPR will be an argument which must be evaluated.
10670 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10671 COMPOUND_EXPR in the chain will contain the tree for the simplified
10672 form of the builtin function call. */
10674 static tree
10675 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10677 if (!validate_arg (s1, POINTER_TYPE)
10678 || !validate_arg (s2, POINTER_TYPE))
10679 return NULL_TREE;
10680 else
10682 tree fn;
10683 const char *p1, *p2;
10685 p2 = c_getstr (s2);
10686 if (p2 == NULL)
10687 return NULL_TREE;
10689 p1 = c_getstr (s1);
10690 if (p1 != NULL)
10692 const char *r = strstr (p1, p2);
10693 tree tem;
10695 if (r == NULL)
10696 return build_int_cst (TREE_TYPE (s1), 0);
10698 /* Return an offset into the constant string argument. */
10699 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10700 s1, size_int (r - p1));
10701 return fold_convert_loc (loc, type, tem);
10704 /* The argument is const char *, and the result is char *, so we need
10705 a type conversion here to avoid a warning. */
10706 if (p2[0] == '\0')
10707 return fold_convert_loc (loc, type, s1);
10709 if (p2[1] != '\0')
10710 return NULL_TREE;
10712 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10713 if (!fn)
10714 return NULL_TREE;
10716 /* New argument list transforming strstr(s1, s2) to
10717 strchr(s1, s2[0]). */
10718 return build_call_expr_loc (loc, fn, 2, s1,
10719 build_int_cst (integer_type_node, p2[0]));
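/* Illustrative sketch, not part of the compiler: examples of the strstr
   folds above for (hypothetical) user code:

     strstr ("hello", "lo")  =>  "hello" + 3    both strings constant
     strstr ("hello", "xy")  =>  (char *) 0
     strstr (s, "")          =>  (char *) s
     strstr (s, "a")         =>  strchr (s, 'a')
     strstr (s, "ab")        =>  not folded; a real strstr call remains  */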
10723 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10724 the call, and TYPE is its return type.
10726 Return NULL_TREE if no simplification was possible, otherwise return the
10727 simplified form of the call as a tree.
10729 The simplified form may be a constant or other expression which
10730 computes the same value, but in a more efficient manner (including
10731 calls to other builtin functions).
10733 The call may contain arguments which need to be evaluated, but
10734 which are not useful to determine the result of the call. In
10735 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10736 COMPOUND_EXPR will be an argument which must be evaluated.
10737 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10738 COMPOUND_EXPR in the chain will contain the tree for the simplified
10739 form of the builtin function call. */
10741 static tree
10742 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10744 if (!validate_arg (s1, POINTER_TYPE)
10745 || !validate_arg (s2, INTEGER_TYPE))
10746 return NULL_TREE;
10747 else
10749 const char *p1;
10751 if (TREE_CODE (s2) != INTEGER_CST)
10752 return NULL_TREE;
10754 p1 = c_getstr (s1);
10755 if (p1 != NULL)
10757 char c;
10758 const char *r;
10759 tree tem;
10761 if (target_char_cast (s2, &c))
10762 return NULL_TREE;
10764 r = strchr (p1, c);
10766 if (r == NULL)
10767 return build_int_cst (TREE_TYPE (s1), 0);
10769 /* Return an offset into the constant string argument. */
10770 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10771 s1, size_int (r - p1));
10772 return fold_convert_loc (loc, type, tem);
10774 return NULL_TREE;
10778 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10779 the call, and TYPE is its return type.
10781 Return NULL_TREE if no simplification was possible, otherwise return the
10782 simplified form of the call as a tree.
10784 The simplified form may be a constant or other expression which
10785 computes the same value, but in a more efficient manner (including
10786 calls to other builtin functions).
10788 The call may contain arguments which need to be evaluated, but
10789 which are not useful to determine the result of the call. In
10790 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10791 COMPOUND_EXPR will be an argument which must be evaluated.
10792 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10793 COMPOUND_EXPR in the chain will contain the tree for the simplified
10794 form of the builtin function call. */
10796 static tree
10797 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10799 if (!validate_arg (s1, POINTER_TYPE)
10800 || !validate_arg (s2, INTEGER_TYPE))
10801 return NULL_TREE;
10802 else
10804 tree fn;
10805 const char *p1;
10807 if (TREE_CODE (s2) != INTEGER_CST)
10808 return NULL_TREE;
10810 p1 = c_getstr (s1);
10811 if (p1 != NULL)
10813 char c;
10814 const char *r;
10815 tree tem;
10817 if (target_char_cast (s2, &c))
10818 return NULL_TREE;
10820 r = strrchr (p1, c);
10822 if (r == NULL)
10823 return build_int_cst (TREE_TYPE (s1), 0);
10825 /* Return an offset into the constant string argument. */
10826 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10827 s1, size_int (r - p1));
10828 return fold_convert_loc (loc, type, tem);
10831 if (! integer_zerop (s2))
10832 return NULL_TREE;
10834 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10835 if (!fn)
10836 return NULL_TREE;
10838 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10839 return build_call_expr_loc (loc, fn, 2, s1, s2);
10843 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10844 to the call, and TYPE is its return type.
10846 Return NULL_TREE if no simplification was possible, otherwise return the
10847 simplified form of the call as a tree.
10849 The simplified form may be a constant or other expression which
10850 computes the same value, but in a more efficient manner (including
10851 calls to other builtin functions).
10853 The call may contain arguments which need to be evaluated, but
10854 which are not useful to determine the result of the call. In
10855 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10856 COMPOUND_EXPR will be an argument which must be evaluated.
10857 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10858 COMPOUND_EXPR in the chain will contain the tree for the simplified
10859 form of the builtin function call. */
10861 static tree
10862 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10864 if (!validate_arg (s1, POINTER_TYPE)
10865 || !validate_arg (s2, POINTER_TYPE))
10866 return NULL_TREE;
10867 else
10869 tree fn;
10870 const char *p1, *p2;
10872 p2 = c_getstr (s2);
10873 if (p2 == NULL)
10874 return NULL_TREE;
10876 p1 = c_getstr (s1);
10877 if (p1 != NULL)
10879 const char *r = strpbrk (p1, p2);
10880 tree tem;
10882 if (r == NULL)
10883 return build_int_cst (TREE_TYPE (s1), 0);
10885 /* Return an offset into the constant string argument. */
10886 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10887 s1, size_int (r - p1));
10888 return fold_convert_loc (loc, type, tem);
10891 if (p2[0] == '\0')
10892 /* strpbrk(x, "") == NULL.
10893 Evaluate and ignore s1 in case it had side-effects. */
10894 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10896 if (p2[1] != '\0')
10897 return NULL_TREE; /* Really call strpbrk. */
10899 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10900 if (!fn)
10901 return NULL_TREE;
10903 /* New argument list transforming strpbrk(s1, s2) to
10904 strchr(s1, s2[0]). */
10905 return build_call_expr_loc (loc, fn, 2, s1,
10906 build_int_cst (integer_type_node, p2[0]));
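/* Illustrative sketch, not part of the compiler: the strpbrk folds above
   parallel the strstr ones:

     strpbrk ("hello", "lo")  =>  "hello" + 2    first 'l'
     strpbrk (s, "")          =>  (char *) 0     s kept for side effects
     strpbrk (s, "a")         =>  strchr (s, 'a')
     strpbrk (s, "ab")        =>  not folded; a real strpbrk call remains  */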
10910 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10911 to the call.
10913 Return NULL_TREE if no simplification was possible, otherwise return the
10914 simplified form of the call as a tree.
10916 The simplified form may be a constant or other expression which
10917 computes the same value, but in a more efficient manner (including
10918 calls to other builtin functions).
10920 The call may contain arguments which need to be evaluated, but
10921 which are not useful to determine the result of the call. In
10922 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10923 COMPOUND_EXPR will be an argument which must be evaluated.
10924 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10925 COMPOUND_EXPR in the chain will contain the tree for the simplified
10926 form of the builtin function call. */
10928 static tree
10929 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
10931 if (!validate_arg (dst, POINTER_TYPE)
10932 || !validate_arg (src, POINTER_TYPE))
10933 return NULL_TREE;
10934 else
10936 const char *p = c_getstr (src);
10938 /* If the string length is zero, return the dst parameter. */
10939 if (p && *p == '\0')
10940 return dst;
10942 if (optimize_insn_for_speed_p ())
10944 /* See if we can store by pieces into (dst + strlen(dst)). */
10945 tree newdst, call;
10946 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10947 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10949 if (!strlen_fn || !strcpy_fn)
10950 return NULL_TREE;
10952 /* If the target has no movstr pattern, we do not want to emit a
10953 strcpy call; only do the transformation if the length of the
10954 source string is computable, so that the strcpy folds into a
10955 memcpy that can later expand to a sequence of mov instructions.
10956 If movstr is available, emitting strcpy calls is fine. */
10957 if (!HAVE_movstr)
10959 tree len = c_strlen (src, 1);
10960 if (! len || TREE_SIDE_EFFECTS (len))
10961 return NULL_TREE;
10964 /* Stabilize the argument list. */
10965 dst = builtin_save_expr (dst);
10967 /* Create strlen (dst). */
10968 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
10969 /* Create (dst p+ strlen (dst)). */
10971 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
10972 TREE_TYPE (dst), dst, newdst);
10973 newdst = builtin_save_expr (newdst);
10975 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
10976 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
10978 return NULL_TREE;
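/* Illustrative sketch, not part of the compiler: when the rewrite above
   applies (optimizing for speed, and either a movstr pattern or a source
   of known length), a call such as

     strcat (dst, "suffix");

   becomes roughly

     (strcpy (dst + strlen (dst), "suffix"), dst)

   which later folding can expand by pieces; strcat (dst, "") simply folds
   to dst.  */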
10982 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10983 arguments to the call.
10985 Return NULL_TREE if no simplification was possible, otherwise return the
10986 simplified form of the call as a tree.
10988 The simplified form may be a constant or other expression which
10989 computes the same value, but in a more efficient manner (including
10990 calls to other builtin functions).
10992 The call may contain arguments which need to be evaluated, but
10993 which are not useful to determine the result of the call. In
10994 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10995 COMPOUND_EXPR will be an argument which must be evaluated.
10996 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10997 COMPOUND_EXPR in the chain will contain the tree for the simplified
10998 form of the builtin function call. */
11000 static tree
11001 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11003 if (!validate_arg (dst, POINTER_TYPE)
11004 || !validate_arg (src, POINTER_TYPE)
11005 || !validate_arg (len, INTEGER_TYPE))
11006 return NULL_TREE;
11007 else
11009 const char *p = c_getstr (src);
11011 /* If the requested length is zero, or the src parameter string
11012 length is zero, return the dst parameter. */
11013 if (integer_zerop (len) || (p && *p == '\0'))
11014 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11016 /* If the requested len is greater than or equal to the string
11017 length, call strcat. */
11018 if (TREE_CODE (len) == INTEGER_CST && p
11019 && compare_tree_int (len, strlen (p)) >= 0)
11021 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11023 /* If the replacement _DECL isn't initialized, don't do the
11024 transformation. */
11025 if (!fn)
11026 return NULL_TREE;
11028 return build_call_expr_loc (loc, fn, 2, dst, src);
11030 return NULL_TREE;
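/* Illustrative sketch (editorial addition, not in the original source):
   with a constant source string the folding above turns

       strncat (d, "abc", 16)      (here 16 >= strlen ("abc"))

   into strcat (d, "abc"); strncat (d, s, 0) and strncat (d, "", n)
   simply fold to D, with the remaining operands still evaluated for
   their side effects. */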
11034 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11035 to the call.
11037 Return NULL_TREE if no simplification was possible, otherwise return the
11038 simplified form of the call as a tree.
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11052 static tree
11053 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11055 if (!validate_arg (s1, POINTER_TYPE)
11056 || !validate_arg (s2, POINTER_TYPE))
11057 return NULL_TREE;
11058 else
11060 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11062 /* If both arguments are constants, evaluate at compile-time. */
11063 if (p1 && p2)
11065 const size_t r = strspn (p1, p2);
11066 return size_int (r);
11069 /* If either argument is "", return NULL_TREE. */
11070 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11071 /* Evaluate and ignore both arguments in case either one has
11072 side-effects. */
11073 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11074 s1, s2);
11075 return NULL_TREE;
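/* Illustrative example (editorial addition, not in the original source):
   when both arguments are string literals the call is evaluated at
   compile time, e.g.

       strspn ("hello", "hel")     folds to the constant 4

   and if either argument is known to be "" the whole call folds to
   (size_t) 0, keeping both argument evaluations for side effects. */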
11079 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11080 to the call.
11082 Return NULL_TREE if no simplification was possible, otherwise return the
11083 simplified form of the call as a tree.
11085 The simplified form may be a constant or other expression which
11086 computes the same value, but in a more efficient manner (including
11087 calls to other builtin functions).
11089 The call may contain arguments which need to be evaluated, but
11090 which are not useful to determine the result of the call. In
11091 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11092 COMPOUND_EXPR will be an argument which must be evaluated.
11093 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11094 COMPOUND_EXPR in the chain will contain the tree for the simplified
11095 form of the builtin function call. */
11097 static tree
11098 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11100 if (!validate_arg (s1, POINTER_TYPE)
11101 || !validate_arg (s2, POINTER_TYPE))
11102 return NULL_TREE;
11103 else
11105 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11107 /* If both arguments are constants, evaluate at compile-time. */
11108 if (p1 && p2)
11110 const size_t r = strcspn (p1, p2);
11111 return size_int (r);
11114 /* If the first argument is "", return NULL_TREE. */
11115 if (p1 && *p1 == '\0')
11117 /* Evaluate and ignore argument s2 in case it has
11118 side-effects. */
11119 return omit_one_operand_loc (loc, size_type_node,
11120 size_zero_node, s2);
11123 /* If the second argument is "", return __builtin_strlen(s1). */
11124 if (p2 && *p2 == '\0')
11126 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11128 /* If the replacement _DECL isn't initialized, don't do the
11129 transformation. */
11130 if (!fn)
11131 return NULL_TREE;
11133 return build_call_expr_loc (loc, fn, 1, s1);
11135 return NULL_TREE;
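/* Illustrative example (editorial addition, not in the original source):
   with literal arguments the call is folded at compile time, e.g.

       strcspn ("hello", "lo")     folds to the constant 2

   while strcspn ("", s2) folds to 0 (S2 still evaluated) and
   strcspn (s1, "") becomes a call to __builtin_strlen (s1). */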
11139 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11140 to the call. IGNORE is true if the value returned
11141 by the builtin will be ignored. UNLOCKED is true if this is
11142 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11143 the known length of the string. Return NULL_TREE if no simplification
11144 was possible. */
11146 tree
11147 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11148 bool ignore, bool unlocked, tree len)
11150 /* If we're using an unlocked function, assume the other unlocked
11151 functions exist explicitly. */
11152 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11153 : implicit_built_in_decls[BUILT_IN_FPUTC];
11154 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11155 : implicit_built_in_decls[BUILT_IN_FWRITE];
11157 /* If the return value is used, don't do the transformation. */
11158 if (!ignore)
11159 return NULL_TREE;
11161 /* Verify the arguments in the original call. */
11162 if (!validate_arg (arg0, POINTER_TYPE)
11163 || !validate_arg (arg1, POINTER_TYPE))
11164 return NULL_TREE;
11166 if (! len)
11167 len = c_strlen (arg0, 0);
11169 /* Get the length of the string passed to fputs. If the length
11170 can't be determined, punt. */
11171 if (!len
11172 || TREE_CODE (len) != INTEGER_CST)
11173 return NULL_TREE;
11175 switch (compare_tree_int (len, 1))
11177 case -1: /* length is 0, delete the call entirely. */
11178 return omit_one_operand_loc (loc, integer_type_node,
11179 integer_zero_node, arg1);
11181 case 0: /* length is 1, call fputc. */
11183 const char *p = c_getstr (arg0);
11185 if (p != NULL)
11187 if (fn_fputc)
11188 return build_call_expr_loc (loc, fn_fputc, 2,
11189 build_int_cst
11190 (integer_type_node, p[0]), arg1);
11191 else
11192 return NULL_TREE;
11195 /* FALLTHROUGH */
11196 case 1: /* length is greater than 1, call fwrite. */
11198 /* If optimizing for size keep fputs. */
11199 if (optimize_function_for_size_p (cfun))
11200 return NULL_TREE;
11201 /* New argument list transforming fputs(string, stream) to
11202 fwrite(string, 1, len, stream). */
11203 if (fn_fwrite)
11204 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11205 size_one_node, len, arg1);
11206 else
11207 return NULL_TREE;
11209 default:
11210 gcc_unreachable ();
11212 return NULL_TREE;
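/* Illustrative sketch (editorial addition, not in the original source):
   when the string argument has a known length and the result is unused,

       fputs ("", fp)       folds away (FP is still evaluated),
       fputs ("x", fp)      becomes fputc ('x', fp), and
       fputs ("abc", fp)    becomes fwrite ("abc", 1, 3, fp),

   except that the fwrite form is skipped when optimizing for size. */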
11215 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11216 produced. False otherwise. This is done so that we don't output the error
11217 or warning twice or three times. */
11219 bool
11220 fold_builtin_next_arg (tree exp, bool va_start_p)
11222 tree fntype = TREE_TYPE (current_function_decl);
11223 int nargs = call_expr_nargs (exp);
11224 tree arg;
11226 if (!stdarg_p (fntype))
11228 error ("%<va_start%> used in function with fixed args");
11229 return true;
11232 if (va_start_p)
11234 if (va_start_p && (nargs != 2))
11236 error ("wrong number of arguments to function %<va_start%>");
11237 return true;
11239 arg = CALL_EXPR_ARG (exp, 1);
11241 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11242 when we checked the arguments and if needed issued a warning. */
11243 else
11245 if (nargs == 0)
11247 /* Evidently an out of date version of <stdarg.h>; can't validate
11248 va_start's second argument, but can still work as intended. */
11249 warning (0, "%<__builtin_next_arg%> called without an argument");
11250 return true;
11252 else if (nargs > 1)
11254 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11255 return true;
11257 arg = CALL_EXPR_ARG (exp, 0);
11260 if (TREE_CODE (arg) == SSA_NAME)
11261 arg = SSA_NAME_VAR (arg);
11263 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11264 or __builtin_next_arg (0) the first time we see it, after checking
11265 the arguments and if needed issuing a warning. */
11266 if (!integer_zerop (arg))
11268 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11270 /* Strip off all nops for the sake of the comparison. This
11271 is not quite the same as STRIP_NOPS. It does more.
11272 We must also strip off INDIRECT_EXPR for C++ reference
11273 parameters. */
11274 while (CONVERT_EXPR_P (arg)
11275 || TREE_CODE (arg) == INDIRECT_REF)
11276 arg = TREE_OPERAND (arg, 0);
11277 if (arg != last_parm)
11279 /* FIXME: Sometimes with the tree optimizers we can end up with
11280 something that is not the last argument even though the user used
11281 the last argument. We just warn and set the arg to be the last
11282 argument so that we will get wrong-code because of
11283 it. */
11284 warning (0, "second parameter of %<va_start%> not last named argument");
11287 /* Undefined by C99 7.15.1.4p4 (va_start):
11288 "If the parameter parmN is declared with the register storage
11289 class, with a function or array type, or with a type that is
11290 not compatible with the type that results after application of
11291 the default argument promotions, the behavior is undefined."
11293 else if (DECL_REGISTER (arg))
11294 warning (0, "undefined behaviour when second parameter of "
11295 "%<va_start%> is declared with %<register%> storage");
11297 /* We want to verify the second parameter just once before the tree
11298 optimizers are run and then avoid keeping it in the tree,
11299 as otherwise we could warn even for correct code like:
11300 void foo (int i, ...)
11301 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11302 if (va_start_p)
11303 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11304 else
11305 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11307 return false;
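/* Illustrative example (editorial addition, not in the original source)
   of the check above: in

       void f (int a, int b, ...)
       { va_list ap; va_start (ap, a); va_end (ap); }

   the second argument of va_start is not the last named parameter, so
   the "not last named argument" warning is issued; with B instead the
   call is accepted, and its second argument is then rewritten to 0 so
   later passes do not re-check it. */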
11311 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11312 ORIG may be null if this is a 2-argument call. We don't attempt to
11313 simplify calls with more than 3 arguments.
11315 Return NULL_TREE if no simplification was possible, otherwise return the
11316 simplified form of the call as a tree. If IGNORED is true, it means that
11317 the caller does not use the returned value of the function. */
11319 static tree
11320 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11321 tree orig, int ignored)
11323 tree call, retval;
11324 const char *fmt_str = NULL;
11326 /* Verify the required arguments in the original call. We deal with two
11327 types of sprintf() calls: 'sprintf (str, fmt)' and
11328 'sprintf (dest, "%s", orig)'. */
11329 if (!validate_arg (dest, POINTER_TYPE)
11330 || !validate_arg (fmt, POINTER_TYPE))
11331 return NULL_TREE;
11332 if (orig && !validate_arg (orig, POINTER_TYPE))
11333 return NULL_TREE;
11335 /* Check whether the format is a literal string constant. */
11336 fmt_str = c_getstr (fmt);
11337 if (fmt_str == NULL)
11338 return NULL_TREE;
11340 call = NULL_TREE;
11341 retval = NULL_TREE;
11343 if (!init_target_chars ())
11344 return NULL_TREE;
11346 /* If the format doesn't contain % args or %%, use strcpy. */
11347 if (strchr (fmt_str, target_percent) == NULL)
11349 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11351 if (!fn)
11352 return NULL_TREE;
11354 /* Don't optimize sprintf (buf, "abc", ptr++). */
11355 if (orig)
11356 return NULL_TREE;
11358 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11359 'format' is known to contain no % formats. */
11360 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11361 if (!ignored)
11362 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11365 /* If the format is "%s", use strcpy if the result isn't used. */
11366 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11368 tree fn;
11369 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11371 if (!fn)
11372 return NULL_TREE;
11374 /* Don't crash on sprintf (str1, "%s"). */
11375 if (!orig)
11376 return NULL_TREE;
11378 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11379 if (!ignored)
11381 retval = c_strlen (orig, 1);
11382 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11383 return NULL_TREE;
11385 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11388 if (call && retval)
11390 retval = fold_convert_loc
11391 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11392 retval);
11393 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11395 else
11396 return call;
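/* Illustrative sketch (editorial addition, not in the original source):
   with a literal format the folding above rewrites

       sprintf (buf, "abc")        as  strcpy (buf, "abc")
       sprintf (buf, "%s", str)    as  strcpy (buf, str)

   and, when the return value is used, chains the strcpy with the known
   result length (3 in the first case, the constant strlen of STR in the
   second) through a COMPOUND_EXPR. */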
11399 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11400 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11401 attempt to simplify calls with more than 4 arguments.
11403 Return NULL_TREE if no simplification was possible, otherwise return the
11404 simplified form of the call as a tree. If IGNORED is true, it means that
11405 the caller does not use the returned value of the function. */
11407 static tree
11408 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11409 tree orig, int ignored)
11411 tree call, retval;
11412 const char *fmt_str = NULL;
11413 unsigned HOST_WIDE_INT destlen;
11415 /* Verify the required arguments in the original call. We deal with two
11416 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11417 'snprintf (dest, cst, "%s", orig)'. */
11418 if (!validate_arg (dest, POINTER_TYPE)
11419 || !validate_arg (destsize, INTEGER_TYPE)
11420 || !validate_arg (fmt, POINTER_TYPE))
11421 return NULL_TREE;
11422 if (orig && !validate_arg (orig, POINTER_TYPE))
11423 return NULL_TREE;
11425 if (!host_integerp (destsize, 1))
11426 return NULL_TREE;
11428 /* Check whether the format is a literal string constant. */
11429 fmt_str = c_getstr (fmt);
11430 if (fmt_str == NULL)
11431 return NULL_TREE;
11433 call = NULL_TREE;
11434 retval = NULL_TREE;
11436 if (!init_target_chars ())
11437 return NULL_TREE;
11439 destlen = tree_low_cst (destsize, 1);
11441 /* If the format doesn't contain % args or %%, use strcpy. */
11442 if (strchr (fmt_str, target_percent) == NULL)
11444 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11445 size_t len = strlen (fmt_str);
11447 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11448 if (orig)
11449 return NULL_TREE;
11451 /* We could expand this as
11452 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11453 or to
11454 memcpy (str, fmt_with_nul_at_cstm1, cst);
11455 but in the former case that might increase code size
11456 and in the latter case grow .rodata section too much.
11457 So punt for now. */
11458 if (len >= destlen)
11459 return NULL_TREE;
11461 if (!fn)
11462 return NULL_TREE;
11464 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11465 'format' is known to contain no % formats and
11466 strlen (fmt) < cst. */
11467 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11469 if (!ignored)
11470 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11473 /* If the format is "%s", use strcpy if the result isn't used. */
11474 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11476 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11477 unsigned HOST_WIDE_INT origlen;
11479 /* Don't crash on snprintf (str1, cst, "%s"). */
11480 if (!orig)
11481 return NULL_TREE;
11483 retval = c_strlen (orig, 1);
11484 if (!retval || !host_integerp (retval, 1))
11485 return NULL_TREE;
11487 origlen = tree_low_cst (retval, 1);
11488 /* We could expand this as
11489 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11490 or to
11491 memcpy (str1, str2_with_nul_at_cstm1, cst);
11492 but in the former case that might increase code size
11493 and in the latter case grow .rodata section too much.
11494 So punt for now. */
11495 if (origlen >= destlen)
11496 return NULL_TREE;
11498 /* Convert snprintf (str1, cst, "%s", str2) into
11499 strcpy (str1, str2) if strlen (str2) < cst. */
11500 if (!fn)
11501 return NULL_TREE;
11503 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11505 if (ignored)
11506 retval = NULL_TREE;
11509 if (call && retval)
11511 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
11512 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11513 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11515 else
11516 return call;
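/* Illustrative sketch (editorial addition, not in the original source):
   the snprintf folding follows the same pattern but only fires when the
   destination size is known to be large enough, e.g.

       snprintf (buf, 16, "abc")         becomes  strcpy (buf, "abc")
       snprintf (buf, 16, "%s", "xyz")   becomes  strcpy (buf, "xyz")

   while snprintf (buf, 3, "abc") is left alone because the output would
   have to be truncated. */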
11519 /* Expand a call EXP to __builtin_object_size. */
11521 static rtx
11522 expand_builtin_object_size (tree exp)
11524 tree ost;
11525 int object_size_type;
11526 tree fndecl = get_callee_fndecl (exp);
11528 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11530 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11531 exp, fndecl);
11532 expand_builtin_trap ();
11533 return const0_rtx;
11536 ost = CALL_EXPR_ARG (exp, 1);
11537 STRIP_NOPS (ost);
11539 if (TREE_CODE (ost) != INTEGER_CST
11540 || tree_int_cst_sgn (ost) < 0
11541 || compare_tree_int (ost, 3) > 0)
11543 error ("%Klast argument of %D is not integer constant between 0 and 3",
11544 exp, fndecl);
11545 expand_builtin_trap ();
11546 return const0_rtx;
11549 object_size_type = tree_low_cst (ost, 0);
11551 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11554 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11555 FCODE is the BUILT_IN_* to use.
11556 Return NULL_RTX if we failed; the caller should emit a normal call,
11557 otherwise try to get the result in TARGET, if convenient (and in
11558 mode MODE if that's convenient). */
11560 static rtx
11561 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11562 enum built_in_function fcode)
11564 tree dest, src, len, size;
11566 if (!validate_arglist (exp,
11567 POINTER_TYPE,
11568 fcode == BUILT_IN_MEMSET_CHK
11569 ? INTEGER_TYPE : POINTER_TYPE,
11570 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11571 return NULL_RTX;
11573 dest = CALL_EXPR_ARG (exp, 0);
11574 src = CALL_EXPR_ARG (exp, 1);
11575 len = CALL_EXPR_ARG (exp, 2);
11576 size = CALL_EXPR_ARG (exp, 3);
11578 if (! host_integerp (size, 1))
11579 return NULL_RTX;
11581 if (host_integerp (len, 1) || integer_all_onesp (size))
11583 tree fn;
11585 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11587 warning_at (tree_nonartificial_location (exp),
11588 0, "%Kcall to %D will always overflow destination buffer",
11589 exp, get_callee_fndecl (exp));
11590 return NULL_RTX;
11593 fn = NULL_TREE;
11594 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11595 mem{cpy,pcpy,move,set} is available. */
11596 switch (fcode)
11598 case BUILT_IN_MEMCPY_CHK:
11599 fn = built_in_decls[BUILT_IN_MEMCPY];
11600 break;
11601 case BUILT_IN_MEMPCPY_CHK:
11602 fn = built_in_decls[BUILT_IN_MEMPCPY];
11603 break;
11604 case BUILT_IN_MEMMOVE_CHK:
11605 fn = built_in_decls[BUILT_IN_MEMMOVE];
11606 break;
11607 case BUILT_IN_MEMSET_CHK:
11608 fn = built_in_decls[BUILT_IN_MEMSET];
11609 break;
11610 default:
11611 break;
11614 if (! fn)
11615 return NULL_RTX;
11617 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11618 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11619 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11620 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11622 else if (fcode == BUILT_IN_MEMSET_CHK)
11623 return NULL_RTX;
11624 else
11626 unsigned int dest_align
11627 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11629 /* If DEST is not a pointer type, call the normal function. */
11630 if (dest_align == 0)
11631 return NULL_RTX;
11633 /* If SRC and DEST are the same (and not volatile), do nothing. */
11634 if (operand_equal_p (src, dest, 0))
11636 tree expr;
11638 if (fcode != BUILT_IN_MEMPCPY_CHK)
11640 /* Evaluate and ignore LEN in case it has side-effects. */
11641 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11642 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11645 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11646 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11649 /* __memmove_chk special case. */
11650 if (fcode == BUILT_IN_MEMMOVE_CHK)
11652 unsigned int src_align
11653 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11655 if (src_align == 0)
11656 return NULL_RTX;
11658 /* If src is categorized for a readonly section we can use
11659 normal __memcpy_chk. */
11660 if (readonly_data_expr (src))
11662 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11663 if (!fn)
11664 return NULL_RTX;
11665 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11666 dest, src, len, size);
11667 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11668 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11669 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11672 return NULL_RTX;
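/* Illustrative sketch (editorial addition, not in the original source):
   for a constant length that cannot overflow the known object size, for
   example

       __builtin___memcpy_chk (d, s, 32, __builtin_object_size (d, 0))

   with an object size of at least 32 (or an unknown size, (size_t) -1),
   the checked call is expanded as a plain memcpy (d, s, 32); if the
   length is provably larger than the object size, the "will always
   overflow destination buffer" warning is emitted and the library call
   is kept. */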
11676 /* Emit warning if a buffer overflow is detected at compile time. */
11678 static void
11679 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11681 int is_strlen = 0;
11682 tree len, size;
11683 location_t loc = tree_nonartificial_location (exp);
11685 switch (fcode)
11687 case BUILT_IN_STRCPY_CHK:
11688 case BUILT_IN_STPCPY_CHK:
11689 /* For __strcat_chk the warning will be emitted only if overflowing
11690 by at least strlen (dest) + 1 bytes. */
11691 case BUILT_IN_STRCAT_CHK:
11692 len = CALL_EXPR_ARG (exp, 1);
11693 size = CALL_EXPR_ARG (exp, 2);
11694 is_strlen = 1;
11695 break;
11696 case BUILT_IN_STRNCAT_CHK:
11697 case BUILT_IN_STRNCPY_CHK:
11698 len = CALL_EXPR_ARG (exp, 2);
11699 size = CALL_EXPR_ARG (exp, 3);
11700 break;
11701 case BUILT_IN_SNPRINTF_CHK:
11702 case BUILT_IN_VSNPRINTF_CHK:
11703 len = CALL_EXPR_ARG (exp, 1);
11704 size = CALL_EXPR_ARG (exp, 3);
11705 break;
11706 default:
11707 gcc_unreachable ();
11710 if (!len || !size)
11711 return;
11713 if (! host_integerp (size, 1) || integer_all_onesp (size))
11714 return;
11716 if (is_strlen)
11718 len = c_strlen (len, 1);
11719 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11720 return;
11722 else if (fcode == BUILT_IN_STRNCAT_CHK)
11724 tree src = CALL_EXPR_ARG (exp, 1);
11725 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11726 return;
11727 src = c_strlen (src, 1);
11728 if (! src || ! host_integerp (src, 1))
11730 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11731 exp, get_callee_fndecl (exp));
11732 return;
11734 else if (tree_int_cst_lt (src, size))
11735 return;
11737 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11738 return;
11740 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11741 exp, get_callee_fndecl (exp));
11744 /* Emit warning if a buffer overflow is detected at compile time
11745 in __sprintf_chk/__vsprintf_chk calls. */
11747 static void
11748 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11750 tree size, len, fmt;
11751 const char *fmt_str;
11752 int nargs = call_expr_nargs (exp);
11754 /* Verify the required arguments in the original call. */
11756 if (nargs < 4)
11757 return;
11758 size = CALL_EXPR_ARG (exp, 2);
11759 fmt = CALL_EXPR_ARG (exp, 3);
11761 if (! host_integerp (size, 1) || integer_all_onesp (size))
11762 return;
11764 /* Check whether the format is a literal string constant. */
11765 fmt_str = c_getstr (fmt);
11766 if (fmt_str == NULL)
11767 return;
11769 if (!init_target_chars ())
11770 return;
11772 /* If the format doesn't contain % args or %%, we know its size. */
11773 if (strchr (fmt_str, target_percent) == 0)
11774 len = build_int_cstu (size_type_node, strlen (fmt_str));
11775 /* If the format is "%s" and first ... argument is a string literal,
11776 we know it too. */
11777 else if (fcode == BUILT_IN_SPRINTF_CHK
11778 && strcmp (fmt_str, target_percent_s) == 0)
11780 tree arg;
11782 if (nargs < 5)
11783 return;
11784 arg = CALL_EXPR_ARG (exp, 4);
11785 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11786 return;
11788 len = c_strlen (arg, 1);
11789 if (!len || ! host_integerp (len, 1))
11790 return;
11792 else
11793 return;
11795 if (! tree_int_cst_lt (len, size))
11796 warning_at (tree_nonartificial_location (exp),
11797 0, "%Kcall to %D will always overflow destination buffer",
11798 exp, get_callee_fndecl (exp));
11801 /* Emit warning if a free is called with address of a variable. */
11803 static void
11804 maybe_emit_free_warning (tree exp)
11806 tree arg = CALL_EXPR_ARG (exp, 0);
11808 STRIP_NOPS (arg);
11809 if (TREE_CODE (arg) != ADDR_EXPR)
11810 return;
11812 arg = get_base_address (TREE_OPERAND (arg, 0));
11813 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11814 return;
11816 if (SSA_VAR_P (arg))
11817 warning_at (tree_nonartificial_location (exp),
11818 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11819 else
11820 warning_at (tree_nonartificial_location (exp),
11821 0, "%Kattempt to free a non-heap object", exp);
11824 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11825 if possible. */
11827 tree
11828 fold_builtin_object_size (tree ptr, tree ost)
11830 unsigned HOST_WIDE_INT bytes;
11831 int object_size_type;
11833 if (!validate_arg (ptr, POINTER_TYPE)
11834 || !validate_arg (ost, INTEGER_TYPE))
11835 return NULL_TREE;
11837 STRIP_NOPS (ost);
11839 if (TREE_CODE (ost) != INTEGER_CST
11840 || tree_int_cst_sgn (ost) < 0
11841 || compare_tree_int (ost, 3) > 0)
11842 return NULL_TREE;
11844 object_size_type = tree_low_cst (ost, 0);
11846 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11847 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11848 and (size_t) 0 for types 2 and 3. */
11849 if (TREE_SIDE_EFFECTS (ptr))
11850 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11852 if (TREE_CODE (ptr) == ADDR_EXPR)
11854 bytes = compute_builtin_object_size (ptr, object_size_type);
11855 if (double_int_fits_to_tree_p (size_type_node,
11856 uhwi_to_double_int (bytes)))
11857 return build_int_cstu (size_type_node, bytes);
11859 else if (TREE_CODE (ptr) == SSA_NAME)
11861 /* If object size is not known yet, delay folding until
11862 later. Maybe subsequent passes will help determine
11863 it. */
11864 bytes = compute_builtin_object_size (ptr, object_size_type);
11865 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11866 && double_int_fits_to_tree_p (size_type_node,
11867 uhwi_to_double_int (bytes)))
11868 return build_int_cstu (size_type_node, bytes);
11871 return NULL_TREE;
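/* Illustrative example (editorial addition, not in the original source):
   given

       char buf[16];
       ... __builtin_object_size (&buf[4], 0) ...

   the ADDR_EXPR case above folds to the constant 12; when the pointer
   cannot be analyzed, the expander shown earlier falls back to
   (size_t) -1 for types 0 and 1 and (size_t) 0 for types 2 and 3. */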
11874 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11875 DEST, SRC, LEN, and SIZE are the arguments to the call.
11876 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
11877 code of the builtin. If MAXLEN is not NULL, it is maximum length
11878 passed as third argument. */
11880 tree
11881 fold_builtin_memory_chk (location_t loc, tree fndecl,
11882 tree dest, tree src, tree len, tree size,
11883 tree maxlen, bool ignore,
11884 enum built_in_function fcode)
11886 tree fn;
11888 if (!validate_arg (dest, POINTER_TYPE)
11889 || !validate_arg (src,
11890 (fcode == BUILT_IN_MEMSET_CHK
11891 ? INTEGER_TYPE : POINTER_TYPE))
11892 || !validate_arg (len, INTEGER_TYPE)
11893 || !validate_arg (size, INTEGER_TYPE))
11894 return NULL_TREE;
11896 /* If SRC and DEST are the same (and not volatile), return DEST
11897 (resp. DEST+LEN for __mempcpy_chk). */
11898 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11900 if (fcode != BUILT_IN_MEMPCPY_CHK)
11901 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11902 dest, len);
11903 else
11905 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11906 dest, len);
11907 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11911 if (! host_integerp (size, 1))
11912 return NULL_TREE;
11914 if (! integer_all_onesp (size))
11916 if (! host_integerp (len, 1))
11918 /* If LEN is not constant, try MAXLEN too.
11919 For MAXLEN only allow optimizing into non-_ocs function
11920 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11921 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11923 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11925 /* (void) __mempcpy_chk () can be optimized into
11926 (void) __memcpy_chk (). */
11927 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11928 if (!fn)
11929 return NULL_TREE;
11931 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11933 return NULL_TREE;
11936 else
11937 maxlen = len;
11939 if (tree_int_cst_lt (size, maxlen))
11940 return NULL_TREE;
11943 fn = NULL_TREE;
11944 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11945 mem{cpy,pcpy,move,set} is available. */
11946 switch (fcode)
11948 case BUILT_IN_MEMCPY_CHK:
11949 fn = built_in_decls[BUILT_IN_MEMCPY];
11950 break;
11951 case BUILT_IN_MEMPCPY_CHK:
11952 fn = built_in_decls[BUILT_IN_MEMPCPY];
11953 break;
11954 case BUILT_IN_MEMMOVE_CHK:
11955 fn = built_in_decls[BUILT_IN_MEMMOVE];
11956 break;
11957 case BUILT_IN_MEMSET_CHK:
11958 fn = built_in_decls[BUILT_IN_MEMSET];
11959 break;
11960 default:
11961 break;
11964 if (!fn)
11965 return NULL_TREE;
11967 return build_call_expr_loc (loc, fn, 3, dest, src, len);
11970 /* Fold a call to the __st[rp]cpy_chk builtin.
11971 DEST, SRC, and SIZE are the arguments to the call.
11972 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11973 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11974 strings passed as second argument. */
11976 tree
11977 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
11978 tree src, tree size,
11979 tree maxlen, bool ignore,
11980 enum built_in_function fcode)
11982 tree len, fn;
11984 if (!validate_arg (dest, POINTER_TYPE)
11985 || !validate_arg (src, POINTER_TYPE)
11986 || !validate_arg (size, INTEGER_TYPE))
11987 return NULL_TREE;
11989 /* If SRC and DEST are the same (and not volatile), return DEST. */
11990 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11991 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
11993 if (! host_integerp (size, 1))
11994 return NULL_TREE;
11996 if (! integer_all_onesp (size))
11998 len = c_strlen (src, 1);
11999 if (! len || ! host_integerp (len, 1))
12001 /* If LEN is not constant, try MAXLEN too.
12002 For MAXLEN only allow optimizing into non-_ocs function
12003 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12004 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12006 if (fcode == BUILT_IN_STPCPY_CHK)
12008 if (! ignore)
12009 return NULL_TREE;
12011 /* If return value of __stpcpy_chk is ignored,
12012 optimize into __strcpy_chk. */
12013 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12014 if (!fn)
12015 return NULL_TREE;
12017 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12020 if (! len || TREE_SIDE_EFFECTS (len))
12021 return NULL_TREE;
12023 /* If c_strlen returned something, but not a constant,
12024 transform __strcpy_chk into __memcpy_chk. */
12025 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12026 if (!fn)
12027 return NULL_TREE;
12029 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12030 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12031 build_call_expr_loc (loc, fn, 4,
12032 dest, src, len, size));
12035 else
12036 maxlen = len;
12038 if (! tree_int_cst_lt (maxlen, size))
12039 return NULL_TREE;
12042 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12043 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12044 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12045 if (!fn)
12046 return NULL_TREE;
12048 return build_call_expr_loc (loc, fn, 2, dest, src);
12051 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12052 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12053 length passed as third argument. */
12055 tree
12056 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12057 tree len, tree size, tree maxlen)
12059 tree fn;
12061 if (!validate_arg (dest, POINTER_TYPE)
12062 || !validate_arg (src, POINTER_TYPE)
12063 || !validate_arg (len, INTEGER_TYPE)
12064 || !validate_arg (size, INTEGER_TYPE))
12065 return NULL_TREE;
12067 if (! host_integerp (size, 1))
12068 return NULL_TREE;
12070 if (! integer_all_onesp (size))
12072 if (! host_integerp (len, 1))
12074 /* If LEN is not constant, try MAXLEN too.
12075 For MAXLEN only allow optimizing into non-_ocs function
12076 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12077 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12078 return NULL_TREE;
12080 else
12081 maxlen = len;
12083 if (tree_int_cst_lt (size, maxlen))
12084 return NULL_TREE;
12087 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12088 fn = built_in_decls[BUILT_IN_STRNCPY];
12089 if (!fn)
12090 return NULL_TREE;
12092 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12095 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12096 are the arguments to the call. */
12098 static tree
12099 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12100 tree src, tree size)
12102 tree fn;
12103 const char *p;
12105 if (!validate_arg (dest, POINTER_TYPE)
12106 || !validate_arg (src, POINTER_TYPE)
12107 || !validate_arg (size, INTEGER_TYPE))
12108 return NULL_TREE;
12110 p = c_getstr (src);
12111 /* If the SRC parameter is "", return DEST. */
12112 if (p && *p == '\0')
12113 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12115 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12116 return NULL_TREE;
12118 /* If __builtin_strcat_chk is used, assume strcat is available. */
12119 fn = built_in_decls[BUILT_IN_STRCAT];
12120 if (!fn)
12121 return NULL_TREE;
12123 return build_call_expr_loc (loc, fn, 2, dest, src);
12126 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12127 LEN, and SIZE. */
12129 static tree
12130 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12131 tree dest, tree src, tree len, tree size)
12133 tree fn;
12134 const char *p;
12136 if (!validate_arg (dest, POINTER_TYPE)
12137 || !validate_arg (src, POINTER_TYPE)
12138 || !validate_arg (len, INTEGER_TYPE)
12139 || !validate_arg (size, INTEGER_TYPE))
12140 return NULL_TREE;
12142 p = c_getstr (src);
12143 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12144 if (p && *p == '\0')
12145 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12146 else if (integer_zerop (len))
12147 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12149 if (! host_integerp (size, 1))
12150 return NULL_TREE;
12152 if (! integer_all_onesp (size))
12154 tree src_len = c_strlen (src, 1);
12155 if (src_len
12156 && host_integerp (src_len, 1)
12157 && host_integerp (len, 1)
12158 && ! tree_int_cst_lt (len, src_len))
12160 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12161 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12162 if (!fn)
12163 return NULL_TREE;
12165 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12167 return NULL_TREE;
12170 /* If __builtin_strncat_chk is used, assume strncat is available. */
12171 fn = built_in_decls[BUILT_IN_STRNCAT];
12172 if (!fn)
12173 return NULL_TREE;
12175 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12178 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12179 Return NULL_TREE if a normal call should be emitted rather than
12180 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12181 or BUILT_IN_VSPRINTF_CHK. */
12183 static tree
12184 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12185 enum built_in_function fcode)
12187 tree dest, size, len, fn, fmt, flag;
12188 const char *fmt_str;
12190 /* Verify the required arguments in the original call. */
12191 if (nargs < 4)
12192 return NULL_TREE;
12193 dest = args[0];
12194 if (!validate_arg (dest, POINTER_TYPE))
12195 return NULL_TREE;
12196 flag = args[1];
12197 if (!validate_arg (flag, INTEGER_TYPE))
12198 return NULL_TREE;
12199 size = args[2];
12200 if (!validate_arg (size, INTEGER_TYPE))
12201 return NULL_TREE;
12202 fmt = args[3];
12203 if (!validate_arg (fmt, POINTER_TYPE))
12204 return NULL_TREE;
12206 if (! host_integerp (size, 1))
12207 return NULL_TREE;
12209 len = NULL_TREE;
12211 if (!init_target_chars ())
12212 return NULL_TREE;
12214 /* Check whether the format is a literal string constant. */
12215 fmt_str = c_getstr (fmt);
12216 if (fmt_str != NULL)
12218 /* If the format doesn't contain % args or %%, we know the size. */
12219 if (strchr (fmt_str, target_percent) == 0)
12221 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12222 len = build_int_cstu (size_type_node, strlen (fmt_str));
12224 /* If the format is "%s" and first ... argument is a string literal,
12225 we know the size too. */
12226 else if (fcode == BUILT_IN_SPRINTF_CHK
12227 && strcmp (fmt_str, target_percent_s) == 0)
12229 tree arg;
12231 if (nargs == 5)
12233 arg = args[4];
12234 if (validate_arg (arg, POINTER_TYPE))
12236 len = c_strlen (arg, 1);
12237 if (! len || ! host_integerp (len, 1))
12238 len = NULL_TREE;
12244 if (! integer_all_onesp (size))
12246 if (! len || ! tree_int_cst_lt (len, size))
12247 return NULL_TREE;
12250 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12251 or if format doesn't contain % chars or is "%s". */
12252 if (! integer_zerop (flag))
12254 if (fmt_str == NULL)
12255 return NULL_TREE;
12256 if (strchr (fmt_str, target_percent) != NULL
12257 && strcmp (fmt_str, target_percent_s))
12258 return NULL_TREE;
12261 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12262 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12263 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12264 if (!fn)
12265 return NULL_TREE;
12267 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12270 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12271 a normal call should be emitted rather than expanding the function
12272 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12274 static tree
12275 fold_builtin_sprintf_chk (location_t loc, tree exp,
12276 enum built_in_function fcode)
12278 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12279 CALL_EXPR_ARGP (exp), fcode);
12282 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
12283 NULL_TREE if a normal call should be emitted rather than expanding
12284 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12285 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12286 passed as second argument. */
12288 static tree
12289 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12290 tree maxlen, enum built_in_function fcode)
12292 tree dest, size, len, fn, fmt, flag;
12293 const char *fmt_str;
12295 /* Verify the required arguments in the original call. */
12296 if (nargs < 5)
12297 return NULL_TREE;
12298 dest = args[0];
12299 if (!validate_arg (dest, POINTER_TYPE))
12300 return NULL_TREE;
12301 len = args[1];
12302 if (!validate_arg (len, INTEGER_TYPE))
12303 return NULL_TREE;
12304 flag = args[2];
12305 if (!validate_arg (flag, INTEGER_TYPE))
12306 return NULL_TREE;
12307 size = args[3];
12308 if (!validate_arg (size, INTEGER_TYPE))
12309 return NULL_TREE;
12310 fmt = args[4];
12311 if (!validate_arg (fmt, POINTER_TYPE))
12312 return NULL_TREE;
12314 if (! host_integerp (size, 1))
12315 return NULL_TREE;
12317 if (! integer_all_onesp (size))
12319 if (! host_integerp (len, 1))
12321 /* If LEN is not constant, try MAXLEN too.
12322 For MAXLEN only allow optimizing into non-_ocs function
12323 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12324 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12325 return NULL_TREE;
12327 else
12328 maxlen = len;
12330 if (tree_int_cst_lt (size, maxlen))
12331 return NULL_TREE;
12334 if (!init_target_chars ())
12335 return NULL_TREE;
12337 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12338 or if format doesn't contain % chars or is "%s". */
12339 if (! integer_zerop (flag))
12341 fmt_str = c_getstr (fmt);
12342 if (fmt_str == NULL)
12343 return NULL_TREE;
12344 if (strchr (fmt_str, target_percent) != NULL
12345 && strcmp (fmt_str, target_percent_s))
12346 return NULL_TREE;
12349 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12350 available. */
12351 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12352 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12353 if (!fn)
12354 return NULL_TREE;
12356 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12359 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12360 a normal call should be emitted rather than expanding the function
12361 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12362 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12363 passed as second argument. */
12365 tree
12366 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12367 enum built_in_function fcode)
12369 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12370 CALL_EXPR_ARGP (exp), maxlen, fcode);
12373 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12374 FMT and ARG are the arguments to the call; we don't fold cases with
12375 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12377 Return NULL_TREE if no simplification was possible, otherwise return the
12378 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12379 code of the function to be simplified. */
12381 static tree
12382 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12383 tree arg, bool ignore,
12384 enum built_in_function fcode)
12386 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12387 const char *fmt_str = NULL;
12389 /* If the return value is used, don't do the transformation. */
12390 if (! ignore)
12391 return NULL_TREE;
12393 /* Verify the required arguments in the original call. */
12394 if (!validate_arg (fmt, POINTER_TYPE))
12395 return NULL_TREE;
12397 /* Check whether the format is a literal string constant. */
12398 fmt_str = c_getstr (fmt);
12399 if (fmt_str == NULL)
12400 return NULL_TREE;
12402 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12404 /* If we're using an unlocked function, assume the other
12405 unlocked functions exist explicitly. */
12406 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12407 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12409 else
12411 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12412 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12415 if (!init_target_chars ())
12416 return NULL_TREE;
12418 if (strcmp (fmt_str, target_percent_s) == 0
12419 || strchr (fmt_str, target_percent) == NULL)
12421 const char *str;
12423 if (strcmp (fmt_str, target_percent_s) == 0)
12425 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12426 return NULL_TREE;
12428 if (!arg || !validate_arg (arg, POINTER_TYPE))
12429 return NULL_TREE;
12431 str = c_getstr (arg);
12432 if (str == NULL)
12433 return NULL_TREE;
12435 else
12437 /* The format specifier doesn't contain any '%' characters. */
12438 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12439 && arg)
12440 return NULL_TREE;
12441 str = fmt_str;
12444 /* If the string was "", printf does nothing. */
12445 if (str[0] == '\0')
12446 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12448 /* If the string has length of 1, call putchar. */
12449 if (str[1] == '\0')
12451 /* Given printf("c"), (where c is any one character,)
12452 convert "c"[0] to an int and pass that to the replacement
12453 function. */
12454 newarg = build_int_cst (integer_type_node, str[0]);
12455 if (fn_putchar)
12456 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12458 else
12460 /* If the string was "string\n", call puts("string"). */
12461 size_t len = strlen (str);
12462 if ((unsigned char)str[len - 1] == target_newline
12463 && (size_t) (int) len == len
12464 && (int) len > 0)
12466 char *newstr;
12467 tree offset_node, string_cst;
12469 /* Create a NUL-terminated string that's one char shorter
12470 than the original, stripping off the trailing '\n'. */
12471 newarg = build_string_literal (len, str);
12472 string_cst = string_constant (newarg, &offset_node);
12473 gcc_checking_assert (string_cst
12474 && (TREE_STRING_LENGTH (string_cst)
12475 == (int) len)
12476 && integer_zerop (offset_node)
12477 && (unsigned char)
12478 TREE_STRING_POINTER (string_cst)[len - 1]
12479 == target_newline);
12480 /* build_string_literal creates a new STRING_CST,
12481 modify it in place to avoid double copying. */
12482 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12483 newstr[len - 1] = '\0';
12484 if (fn_puts)
12485 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12487 else
12488 /* We'd like to arrange to call fputs(string,stdout) here,
12489 but we need stdout and don't have a way to get it yet. */
12490 return NULL_TREE;
12494 /* The other optimizations can be done only on the non-va_list variants. */
12495 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12496 return NULL_TREE;
12498 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12499 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12501 if (!arg || !validate_arg (arg, POINTER_TYPE))
12502 return NULL_TREE;
12503 if (fn_puts)
12504 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12507 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12508 else if (strcmp (fmt_str, target_percent_c) == 0)
12510 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12511 return NULL_TREE;
12512 if (fn_putchar)
12513 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12516 if (!call)
12517 return NULL_TREE;
12519 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
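/* Illustrative sketch (editorial addition, not in the original source):
   with a literal format and an unused result the folding above gives,
   for example,

       printf ("")           nothing (folds to 0)
       printf ("x")          putchar ('x')
       printf ("line\n")     puts ("line")
       printf ("%s\n", s)    puts (s)
       printf ("%c", c)      putchar (c)

   the _unlocked variant uses putchar_unlocked/puts_unlocked, the
   __printf_chk form is folded the same way, and the va_list variants
   (vprintf, __vprintf_chk) only get the plain literal-format cases. */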
12522 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12523 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12524 more than 3 arguments, and ARG may be null in the 2-argument case.
12526 Return NULL_TREE if no simplification was possible, otherwise return the
12527 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12528 code of the function to be simplified. */
12530 static tree
12531 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12532 tree fmt, tree arg, bool ignore,
12533 enum built_in_function fcode)
12535 tree fn_fputc, fn_fputs, call = NULL_TREE;
12536 const char *fmt_str = NULL;
12538 /* If the return value is used, don't do the transformation. */
12539 if (! ignore)
12540 return NULL_TREE;
12542 /* Verify the required arguments in the original call. */
12543 if (!validate_arg (fp, POINTER_TYPE))
12544 return NULL_TREE;
12545 if (!validate_arg (fmt, POINTER_TYPE))
12546 return NULL_TREE;
12548 /* Check whether the format is a literal string constant. */
12549 fmt_str = c_getstr (fmt);
12550 if (fmt_str == NULL)
12551 return NULL_TREE;
12553 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12555 /* If we're using an unlocked function, assume the other
12556 unlocked functions exist explicitly. */
12557 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12558 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12560 else
12562 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12563 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12566 if (!init_target_chars ())
12567 return NULL_TREE;
12569 /* If the format doesn't contain % args or %%, use strcpy. */
12570 if (strchr (fmt_str, target_percent) == NULL)
12572 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12573 && arg)
12574 return NULL_TREE;
12576 /* If the format specifier was "", fprintf does nothing. */
12577 if (fmt_str[0] == '\0')
12579 /* If FP has side-effects, just wait until gimplification is
12580 done. */
12581 if (TREE_SIDE_EFFECTS (fp))
12582 return NULL_TREE;
12584 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12587 /* When "string" doesn't contain %, replace all cases of
12588 fprintf (fp, string) with fputs (string, fp). The fputs
12589 builtin will take care of special cases like length == 1. */
12590 if (fn_fputs)
12591 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12594 /* The other optimizations can be done only on the non-va_list variants. */
12595 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12596 return NULL_TREE;
12598 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12599 else if (strcmp (fmt_str, target_percent_s) == 0)
12601 if (!arg || !validate_arg (arg, POINTER_TYPE))
12602 return NULL_TREE;
12603 if (fn_fputs)
12604 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12607 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12608 else if (strcmp (fmt_str, target_percent_c) == 0)
12610 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12611 return NULL_TREE;
12612 if (fn_fputc)
12613 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12616 if (!call)
12617 return NULL_TREE;
12618 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
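/* Illustrative sketch (editorial addition, not in the original source):
   the fprintf folding mirrors the printf one when the result is unused,
   e.g.

       fprintf (fp, "abc")       becomes  fputs ("abc", fp)
       fprintf (fp, "%s", s)     becomes  fputs (s, fp)
       fprintf (fp, "%c", c)     becomes  fputc (c, fp)

   and fprintf (fp, "") folds to 0 provided FP has no side effects. */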
12621 /* Initialize format string characters in the target charset. */
12623 static bool
12624 init_target_chars (void)
12626 static bool init;
12627 if (!init)
12629 target_newline = lang_hooks.to_target_charset ('\n');
12630 target_percent = lang_hooks.to_target_charset ('%');
12631 target_c = lang_hooks.to_target_charset ('c');
12632 target_s = lang_hooks.to_target_charset ('s');
12633 if (target_newline == 0 || target_percent == 0 || target_c == 0
12634 || target_s == 0)
12635 return false;
12637 target_percent_c[0] = target_percent;
12638 target_percent_c[1] = target_c;
12639 target_percent_c[2] = '\0';
12641 target_percent_s[0] = target_percent;
12642 target_percent_s[1] = target_s;
12643 target_percent_s[2] = '\0';
12645 target_percent_s_newline[0] = target_percent;
12646 target_percent_s_newline[1] = target_s;
12647 target_percent_s_newline[2] = target_newline;
12648 target_percent_s_newline[3] = '\0';
12650 init = true;
12652 return true;
12655 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12656 and no overflow/underflow occurred. INEXACT is true if M was not
12657 exactly calculated. TYPE is the tree type for the result. This
12658 function assumes that you cleared the MPFR flags and then
12659 calculated M to see if anything subsequently set a flag prior to
12660 entering this function. Return NULL_TREE if any checks fail. */
12662 static tree
12663 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12665 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12666 overflow/underflow occurred. If -frounding-math, proceed iff the
12667 result of calling FUNC was exact. */
12668 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12669 && (!flag_rounding_math || !inexact))
12671 REAL_VALUE_TYPE rr;
12673 real_from_mpfr (&rr, m, type, GMP_RNDN);
12674 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12675 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12676 but the mpfr_t is not, then we underflowed in the
12677 conversion. */
12678 if (real_isfinite (&rr)
12679 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12681 REAL_VALUE_TYPE rmode;
12683 real_convert (&rmode, TYPE_MODE (type), &rr);
12684 /* Proceed iff the specified mode can hold the value. */
12685 if (real_identical (&rmode, &rr))
12686 return build_real (type, rmode);
12689 return NULL_TREE;
12692 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12693 number and no overflow/underflow occurred. INEXACT is true if M
12694 was not exactly calculated. TYPE is the tree type for the result.
12695 This function assumes that you cleared the MPFR flags and then
12696 calculated M to see if anything subsequently set a flag prior to
12697 entering this function. Return NULL_TREE if any checks fail, if
12698 FORCE_CONVERT is true, then bypass the checks. */
12700 static tree
12701 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12703 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12704 overflow/underflow occurred. If -frounding-math, proceed iff the
12705 result of calling FUNC was exact. */
12706 if (force_convert
12707 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12708 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12709 && (!flag_rounding_math || !inexact)))
12711 REAL_VALUE_TYPE re, im;
12713 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12714 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12715 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12716 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12717 but the mpfr_t is not, then we underflowed in the
12718 conversion. */
12719 if (force_convert
12720 || (real_isfinite (&re) && real_isfinite (&im)
12721 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12722 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12724 REAL_VALUE_TYPE re_mode, im_mode;
12726 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12727 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12728 /* Proceed iff the specified mode can hold the value. */
12729 if (force_convert
12730 || (real_identical (&re_mode, &re)
12731 && real_identical (&im_mode, &im)))
12732 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12733 build_real (TREE_TYPE (type), im_mode));
12736 return NULL_TREE;
12739 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12740 FUNC on it and return the resulting value as a tree with type TYPE.
12741 If MIN and/or MAX are not NULL, then the supplied ARG must be
12742 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12743 acceptable values, otherwise they are not. The mpfr precision is
12744 set to the precision of TYPE. We assume that function FUNC returns
12745 zero if the result could be calculated exactly within the requested
12746 precision. */
12748 static tree
12749 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12750 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12751 bool inclusive)
12753 tree result = NULL_TREE;
12755 STRIP_NOPS (arg);
12757 /* To proceed, MPFR must exactly represent the target floating point
12758 format, which only happens when the target base equals two. */
12759 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12760 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12762 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12764 if (real_isfinite (ra)
12765 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12766 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12768 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12769 const int prec = fmt->p;
12770 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12771 int inexact;
12772 mpfr_t m;
12774 mpfr_init2 (m, prec);
12775 mpfr_from_real (m, ra, GMP_RNDN);
12776 mpfr_clear_flags ();
12777 inexact = func (m, m, rnd);
12778 result = do_mpfr_ckconv (m, type, inexact);
12779 mpfr_clear (m);
12783 return result;
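/* Illustrative note (editorial addition, not in the original source):
   this helper is what allows a call with a constant argument, such as

       __builtin_sin (1.0)

   to be folded to a REAL_CST at compile time: the argument is copied
   into an mpfr_t at the precision of TYPE, FUNC (here mpfr_sin) is
   applied, and do_mpfr_ckconv keeps the result only if it is finite,
   representable exactly in the target mode and, under -frounding-math,
   was computed exactly. */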
12786 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12787 FUNC on it and return the resulting value as a tree with type TYPE.
12788 The mpfr precision is set to the precision of TYPE. We assume that
12789 function FUNC returns zero if the result could be calculated
12790 exactly within the requested precision. */
12792 static tree
12793 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12794 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12796 tree result = NULL_TREE;
12798 STRIP_NOPS (arg1);
12799 STRIP_NOPS (arg2);
12801 /* To proceed, MPFR must exactly represent the target floating point
12802 format, which only happens when the target base equals two. */
12803 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12804 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12805 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12807 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12808 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12810 if (real_isfinite (ra1) && real_isfinite (ra2))
12812 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12813 const int prec = fmt->p;
12814 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12815 int inexact;
12816 mpfr_t m1, m2;
12818 mpfr_inits2 (prec, m1, m2, NULL);
12819 mpfr_from_real (m1, ra1, GMP_RNDN);
12820 mpfr_from_real (m2, ra2, GMP_RNDN);
12821 mpfr_clear_flags ();
12822 inexact = func (m1, m1, m2, rnd);
12823 result = do_mpfr_ckconv (m1, type, inexact);
12824 mpfr_clears (m1, m2, NULL);
12828 return result;
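/* Illustrative only, not part of builtins.c: the two-operand pattern
   used here (and, with one more operand, by do_mpfr_arg3 below) is the
   same as the one-operand case -- convert both REAL_CSTs, call FUNC
   once, and let do_mpfr_ckconv decide whether the folded constant is
   trustworthy.  mpfr_atan2 stands in for FUNC and double precision for
   the target type; build with -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m1, m2;
  int inexact;

  mpfr_inits2 (53, m1, m2, NULL);
  mpfr_set_d (m1, 1.0, GMP_RNDN);
  mpfr_set_d (m2, 1.0, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_atan2 (m1, m1, m2, GMP_RNDN);   /* func (m1, m1, m2, rnd) */
  printf ("atan2(1, 1) ~= %.17g  inexact=%d\n",
          mpfr_get_d (m1, GMP_RNDN), inexact);
  mpfr_clears (m1, m2, NULL);
  return 0;
}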
12831 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12832 FUNC on it and return the resulting value as a tree with type TYPE.
12833 The mpfr precision is set to the precision of TYPE. We assume that
12834 function FUNC returns zero if the result could be calculated
12835 exactly within the requested precision. */
12837 static tree
12838 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12839 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12841 tree result = NULL_TREE;
12843 STRIP_NOPS (arg1);
12844 STRIP_NOPS (arg2);
12845 STRIP_NOPS (arg3);
12847 /* To proceed, MPFR must exactly represent the target floating point
12848 format, which only happens when the target base equals two. */
12849 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12850 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12851 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12852 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12854 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12855 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12856 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12858 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12860 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12861 const int prec = fmt->p;
12862 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12863 int inexact;
12864 mpfr_t m1, m2, m3;
12866 mpfr_inits2 (prec, m1, m2, m3, NULL);
12867 mpfr_from_real (m1, ra1, GMP_RNDN);
12868 mpfr_from_real (m2, ra2, GMP_RNDN);
12869 mpfr_from_real (m3, ra3, GMP_RNDN);
12870 mpfr_clear_flags ();
12871 inexact = func (m1, m1, m2, m3, rnd);
12872 result = do_mpfr_ckconv (m1, type, inexact);
12873 mpfr_clears (m1, m2, m3, NULL);
12877 return result;
12880 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and store
12881 the resulting values through *(ARG_SINP) and *(ARG_COSP).
12882 If ARG_SINP and ARG_COSP are NULL then the result is returned
12883 as a complex value.
12884 The type is taken from the type of ARG and is used for setting the
12885 precision of the calculation and results. */
12887 static tree
12888 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12890 tree const type = TREE_TYPE (arg);
12891 tree result = NULL_TREE;
12893 STRIP_NOPS (arg);
12895 /* To proceed, MPFR must exactly represent the target floating point
12896 format, which only happens when the target base equals two. */
12897 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12898 && TREE_CODE (arg) == REAL_CST
12899 && !TREE_OVERFLOW (arg))
12901 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12903 if (real_isfinite (ra))
12905 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12906 const int prec = fmt->p;
12907 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12908 tree result_s, result_c;
12909 int inexact;
12910 mpfr_t m, ms, mc;
12912 mpfr_inits2 (prec, m, ms, mc, NULL);
12913 mpfr_from_real (m, ra, GMP_RNDN);
12914 mpfr_clear_flags ();
12915 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12916 result_s = do_mpfr_ckconv (ms, type, inexact);
12917 result_c = do_mpfr_ckconv (mc, type, inexact);
12918 mpfr_clears (m, ms, mc, NULL);
12919 if (result_s && result_c)
12921 /* If we are to return the result as a complex value, do so. */
12922 if (!arg_sinp && !arg_cosp)
12923 return build_complex (build_complex_type (type),
12924 result_c, result_s);
12926 /* Dereference the sin/cos pointer arguments. */
12927 arg_sinp = build_fold_indirect_ref (arg_sinp);
12928 arg_cosp = build_fold_indirect_ref (arg_cosp);
12929 /* Proceed iff valid pointer types were passed in. */
12930 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12931 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12933 /* Set the values. */
12934 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12935 result_s);
12936 TREE_SIDE_EFFECTS (result_s) = 1;
12937 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12938 result_c);
12939 TREE_SIDE_EFFECTS (result_c) = 1;
12940 /* Combine the assignments into a compound expr. */
12941 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12942 result_s, result_c));
12947 return result;
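/* Illustrative only, not part of builtins.c: mpfr_sin_cos, as wrapped
   by do_mpfr_sincos, computes both results in one pass, and a single
   nonzero return value means at least one of them was inexact -- which
   is why the code above feeds the same INEXACT to both do_mpfr_ckconv
   calls.  Double precision stands in for the target type; build with
   -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m, ms, mc;
  int inexact;

  mpfr_inits2 (53, m, ms, mc, NULL);
  mpfr_set_d (m, 1.0, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
  printf ("sin(1) ~= %.17g  cos(1) ~= %.17g  inexact=%d\n",
          mpfr_get_d (ms, GMP_RNDN), mpfr_get_d (mc, GMP_RNDN), inexact);
  mpfr_clears (m, ms, mc, NULL);
  return 0;
}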
12950 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12951 two-argument mpfr order N Bessel function FUNC on them and return
12952 the resulting value as a tree with type TYPE. The mpfr precision
12953 is set to the precision of TYPE. We assume that function FUNC
12954 returns zero if the result could be calculated exactly within the
12955 requested precision. */
12956 static tree
12957 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12958 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12959 const REAL_VALUE_TYPE *min, bool inclusive)
12961 tree result = NULL_TREE;
12963 STRIP_NOPS (arg1);
12964 STRIP_NOPS (arg2);
12966 /* To proceed, MPFR must exactly represent the target floating point
12967 format, which only happens when the target base equals two. */
12968 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12969 && host_integerp (arg1, 0)
12970 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12972 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
12973 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12975 if (n == (long)n
12976 && real_isfinite (ra)
12977 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12979 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12980 const int prec = fmt->p;
12981 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12982 int inexact;
12983 mpfr_t m;
12985 mpfr_init2 (m, prec);
12986 mpfr_from_real (m, ra, GMP_RNDN);
12987 mpfr_clear_flags ();
12988 inexact = func (m, n, m, rnd);
12989 result = do_mpfr_ckconv (m, type, inexact);
12990 mpfr_clear (m);
12994 return result;
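/* Illustrative only, not part of builtins.c: the order-N Bessel entry
   points that do_mpfr_bessel_n expects (mpfr_jn/mpfr_yn) take the
   integer order as a host "long", which is why the caller checks
   n == (long) n before calling.  A stand-alone sketch with double
   precision as the assumed target type; build with -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  long n = 2;
  int inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, 1.5, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_jn (m, n, m, GMP_RNDN);   /* func (m, n, m, rnd) */
  printf ("jn(%ld, 1.5) ~= %.17g  inexact=%d\n",
          n, mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}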
12997 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to store
12998 the quotient through *(ARG_QUO) and return the result. The type is taken
12999 from the type of ARG0 and is used for setting the precision of the
13000 calculation and results. */
13002 static tree
13003 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13005 tree const type = TREE_TYPE (arg0);
13006 tree result = NULL_TREE;
13008 STRIP_NOPS (arg0);
13009 STRIP_NOPS (arg1);
13011 /* To proceed, MPFR must exactly represent the target floating point
13012 format, which only happens when the target base equals two. */
13013 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13014 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13015 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13017 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13018 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13020 if (real_isfinite (ra0) && real_isfinite (ra1))
13022 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13023 const int prec = fmt->p;
13024 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13025 tree result_rem;
13026 long integer_quo;
13027 mpfr_t m0, m1;
13029 mpfr_inits2 (prec, m0, m1, NULL);
13030 mpfr_from_real (m0, ra0, GMP_RNDN);
13031 mpfr_from_real (m1, ra1, GMP_RNDN);
13032 mpfr_clear_flags ();
13033 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13034 /* Remquo is independent of the rounding mode, so pass
13035 inexact=0 to do_mpfr_ckconv(). */
13036 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13037 mpfr_clears (m0, m1, NULL);
13038 if (result_rem)
13040 /* MPFR calculates quo in the host's long so it may
13041 return more bits in quo than the target int can hold
13042 if sizeof(host long) > sizeof(target int). This can
13043 happen even for native compilers in LP64 mode. In
13044 these cases, reduce the quo value modulo the largest
13045 number that the target int can hold, leaving one
13046 bit for the sign. */
13047 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13048 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13050 /* Dereference the quo pointer argument. */
13051 arg_quo = build_fold_indirect_ref (arg_quo);
13052 /* Proceed iff a valid pointer type was passed in. */
13053 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13055 /* Set the value. */
13056 tree result_quo
13057 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13058 build_int_cst (TREE_TYPE (arg_quo),
13059 integer_quo));
13060 TREE_SIDE_EFFECTS (result_quo) = 1;
13061 /* Combine the quo assignment with the rem. */
13062 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13063 result_quo, result_rem));
13068 return result;
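/* Illustrative only, not part of builtins.c: mpfr_remquo, as used by
   do_mpfr_remquo, hands back the low quotient bits in a host "long",
   so on an LP64 host targeting a 32-bit int the value may need to be
   reduced before it fits through the int pointer, mirroring the
   INT_TYPE_SIZE check above.  The "31" below assumes a 32-bit target
   int; build with -lmpfr -lgmp.  */
#include <stdio.h>
#include <limits.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t r, x, y;
  long quo;

  mpfr_inits2 (53, r, x, y, NULL);
  mpfr_set_d (x, 7.5, GMP_RNDN);
  mpfr_set_d (y, 2.0, GMP_RNDN);
  mpfr_remquo (r, &quo, x, y, GMP_RNDN);

  if (sizeof (quo) * CHAR_BIT > 32)   /* host long wider than the assumed int */
    quo %= (long) (1UL << 31);        /* keep one bit for the sign */

  printf ("remquo(7.5, 2.0): rem ~= %.17g  quo = %ld\n",
          mpfr_get_d (r, GMP_RNDN), quo);
  mpfr_clears (r, x, y, NULL);
  return 0;
}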
13071 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13072 resulting value as a tree with type TYPE. The mpfr precision is
13073 set to the precision of TYPE. We assume that this mpfr function
13074 returns zero if the result could be calculated exactly within the
13075 requested precision. In addition, the integer pointer represented
13076 by ARG_SG will be dereferenced and set to the appropriate signgam
13077 (-1,1) value. */
13079 static tree
13080 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13082 tree result = NULL_TREE;
13084 STRIP_NOPS (arg);
13086 /* To proceed, MPFR must exactly represent the target floating point
13087 format, which only happens when the target base equals two. Also
13088 verify ARG is a constant and that ARG_SG is an int pointer. */
13089 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13090 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13091 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13092 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13094 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13096 /* In addition to NaN and Inf, the argument cannot be zero or a
13097 negative integer. */
13098 if (real_isfinite (ra)
13099 && ra->cl != rvc_zero
13100 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13102 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13103 const int prec = fmt->p;
13104 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13105 int inexact, sg;
13106 mpfr_t m;
13107 tree result_lg;
13109 mpfr_init2 (m, prec);
13110 mpfr_from_real (m, ra, GMP_RNDN);
13111 mpfr_clear_flags ();
13112 inexact = mpfr_lgamma (m, &sg, m, rnd);
13113 result_lg = do_mpfr_ckconv (m, type, inexact);
13114 mpfr_clear (m);
13115 if (result_lg)
13117 tree result_sg;
13119 /* Dereference the arg_sg pointer argument. */
13120 arg_sg = build_fold_indirect_ref (arg_sg);
13121 /* Assign the signgam value into *arg_sg. */
13122 result_sg = fold_build2 (MODIFY_EXPR,
13123 TREE_TYPE (arg_sg), arg_sg,
13124 build_int_cst (TREE_TYPE (arg_sg), sg));
13125 TREE_SIDE_EFFECTS (result_sg) = 1;
13126 /* Combine the signgam assignment with the lgamma result. */
13127 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13128 result_sg, result_lg));
13133 return result;
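/* Illustrative only, not part of builtins.c: mpfr_lgamma, as wrapped
   by do_mpfr_lgamma_r, hands back the sign of Gamma(x) through an int
   pointer -- the value lgamma_r stores in *signgamp.  A stand-alone
   sketch at double precision; build with -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int sg, inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, GMP_RNDN);          /* Gamma(-2.5) is negative */
  mpfr_clear_flags ();
  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
  printf ("lgamma(-2.5) ~= %.17g  signgam = %d  inexact = %d\n",
          mpfr_get_d (m, GMP_RNDN), sg, inexact);
  mpfr_clear (m);
  return 0;
}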
13136 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13137 function FUNC on it and return the resulting value as a tree with
13138 type TYPE. The mpfr precision is set to the precision of TYPE. We
13139 assume that function FUNC returns zero if the result could be
13140 calculated exactly within the requested precision. */
13142 static tree
13143 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13145 tree result = NULL_TREE;
13147 STRIP_NOPS (arg);
13149 /* To proceed, MPFR must exactly represent the target floating point
13150 format, which only happens when the target base equals two. */
13151 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13152 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13153 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13155 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13156 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13158 if (real_isfinite (re) && real_isfinite (im))
13160 const struct real_format *const fmt =
13161 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13162 const int prec = fmt->p;
13163 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13164 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13165 int inexact;
13166 mpc_t m;
13168 mpc_init2 (m, prec);
13169 mpfr_from_real (mpc_realref(m), re, rnd);
13170 mpfr_from_real (mpc_imagref(m), im, rnd);
13171 mpfr_clear_flags ();
13172 inexact = func (m, m, crnd);
13173 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13174 mpc_clear (m);
13178 return result;
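/* Illustrative only, not part of builtins.c: the MPC side mirrors the
   MPFR side -- one mpc_t holds both parts at the chosen precision and
   a single rounding mode (MPC_RNDNN here) covers both.  mpc_sqrt
   stands in for the FUNC parameter of do_mpc_arg1; build with
   -lmpc -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);                       /* precision of the assumed target type */
  mpc_set_d_d (m, -1.0, 0.0, MPC_RNDNN);   /* the COMPLEX_CST argument, -1+0i */
  mpfr_clear_flags ();
  inexact = mpc_sqrt (m, m, MPC_RNDNN);    /* func (m, m, crnd) */
  printf ("csqrt(-1+0i) ~= %.17g %+.17gi  inexact=%d\n",
          mpfr_get_d (mpc_realref (m), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m), GMP_RNDN), inexact);
  mpc_clear (m);
  return 0;
}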
13181 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13182 mpc function FUNC on them and return the resulting value as a tree
13183 with type TYPE. The mpfr precision is set to the precision of
13184 TYPE. We assume that function FUNC returns zero if the result
13185 could be calculated exactly within the requested precision. If
13186 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13187 in the arguments and/or results. */
13189 tree
13190 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13191 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13193 tree result = NULL_TREE;
13195 STRIP_NOPS (arg0);
13196 STRIP_NOPS (arg1);
13198 /* To proceed, MPFR must exactly represent the target floating point
13199 format, which only happens when the target base equals two. */
13200 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13201 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13202 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13203 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13204 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13206 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13207 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13208 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13209 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13211 if (do_nonfinite
13212 || (real_isfinite (re0) && real_isfinite (im0)
13213 && real_isfinite (re1) && real_isfinite (im1)))
13215 const struct real_format *const fmt =
13216 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13217 const int prec = fmt->p;
13218 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13219 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13220 int inexact;
13221 mpc_t m0, m1;
13223 mpc_init2 (m0, prec);
13224 mpc_init2 (m1, prec);
13225 mpfr_from_real (mpc_realref(m0), re0, rnd);
13226 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13227 mpfr_from_real (mpc_realref(m1), re1, rnd);
13228 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13229 mpfr_clear_flags ();
13230 inexact = func (m0, m0, m1, crnd);
13231 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13232 mpc_clear (m0);
13233 mpc_clear (m1);
13237 return result;
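/* Illustrative only, not part of builtins.c: do_mpc_arg2 is used for
   two-operand complex folds such as cpow, and mpc_pow has exactly the
   shape the FUNC parameter expects.  A stand-alone sketch at double
   precision; build with -lmpc -lmpfr -lgmp.  */
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m0, m1;
  int inexact;

  mpc_init2 (m0, 53);
  mpc_init2 (m1, 53);
  mpc_set_d_d (m0, 0.0, 1.0, MPC_RNDNN);       /* i */
  mpc_set_d_d (m1, 2.0, 0.0, MPC_RNDNN);       /* 2 */
  mpfr_clear_flags ();
  inexact = mpc_pow (m0, m0, m1, MPC_RNDNN);   /* func (m0, m0, m1, crnd) */
  printf ("cpow(i, 2) ~= %.17g %+.17gi  inexact=%d\n",
          mpfr_get_d (mpc_realref (m0), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m0), GMP_RNDN), inexact);
  mpc_clear (m0);
  mpc_clear (m1);
  return 0;
}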
13240 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13241 a normal call should be emitted rather than expanding the function
13242 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13244 static tree
13245 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13247 int nargs = gimple_call_num_args (stmt);
13249 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13250 (nargs > 0
13251 ? gimple_call_arg_ptr (stmt, 0)
13252 : &error_mark_node), fcode);
13255 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13256 a normal call should be emitted rather than expanding the function
13257 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13258 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13259 passed as the second argument. */
13261 tree
13262 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13263 enum built_in_function fcode)
13265 int nargs = gimple_call_num_args (stmt);
13267 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13268 (nargs > 0
13269 ? gimple_call_arg_ptr (stmt, 0)
13270 : &error_mark_node), maxlen, fcode);
13273 /* Builtins with folding operations that operate on "..." arguments
13274 need special handling; we need to store the arguments in a convenient
13275 data structure before attempting any folding. Fortunately there are
13276 only a few builtins that fall into this category. FNDECL is the
13277 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13278 result of the function call is ignored. */
13280 static tree
13281 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13282 bool ignore ATTRIBUTE_UNUSED)
13284 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13285 tree ret = NULL_TREE;
13287 switch (fcode)
13289 case BUILT_IN_SPRINTF_CHK:
13290 case BUILT_IN_VSPRINTF_CHK:
13291 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13292 break;
13294 case BUILT_IN_SNPRINTF_CHK:
13295 case BUILT_IN_VSNPRINTF_CHK:
13296 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13298 default:
13299 break;
13301 if (ret)
13303 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13304 TREE_NO_WARNING (ret) = 1;
13305 return ret;
13307 return NULL_TREE;
13310 /* A wrapper function for builtin folding that prevents warnings for
13311 "statement without effect" and the like, caused by removing the
13312 call node before the warning is generated. */
13314 tree
13315 fold_call_stmt (gimple stmt, bool ignore)
13317 tree ret = NULL_TREE;
13318 tree fndecl = gimple_call_fndecl (stmt);
13319 location_t loc = gimple_location (stmt);
13320 if (fndecl
13321 && TREE_CODE (fndecl) == FUNCTION_DECL
13322 && DECL_BUILT_IN (fndecl)
13323 && !gimple_call_va_arg_pack_p (stmt))
13325 int nargs = gimple_call_num_args (stmt);
13326 tree *args = (nargs > 0
13327 ? gimple_call_arg_ptr (stmt, 0)
13328 : &error_mark_node);
13330 if (avoid_folding_inline_builtin (fndecl))
13331 return NULL_TREE;
13332 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13334 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13336 else
13338 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13339 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13340 if (!ret)
13341 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13342 if (ret)
13344 /* Propagate location information from the original call to the
13345 expansion of the builtin. Otherwise functions like
13346 maybe_emit_chk_warning, which operate on the expansion
13347 of a builtin, will use the wrong location information. */
13348 if (gimple_has_location (stmt))
13350 tree realret = ret;
13351 if (TREE_CODE (ret) == NOP_EXPR)
13352 realret = TREE_OPERAND (ret, 0);
13353 if (CAN_HAVE_LOCATION_P (realret)
13354 && !EXPR_HAS_LOCATION (realret))
13355 SET_EXPR_LOCATION (realret, loc);
13356 return realret;
13358 return ret;
13362 return NULL_TREE;
13365 /* Look up the function in built_in_decls that corresponds to DECL
13366 and set ASMSPEC as its user assembler name. DECL must be a
13367 function decl that declares a builtin. */
13369 void
13370 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13372 tree builtin;
13373 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13374 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13375 && asmspec != 0);
13377 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13378 set_user_assembler_name (builtin, asmspec);
13379 switch (DECL_FUNCTION_CODE (decl))
13381 case BUILT_IN_MEMCPY:
13382 init_block_move_fn (asmspec);
13383 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13384 break;
13385 case BUILT_IN_MEMSET:
13386 init_block_clear_fn (asmspec);
13387 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13388 break;
13389 case BUILT_IN_MEMMOVE:
13390 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13391 break;
13392 case BUILT_IN_MEMCMP:
13393 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13394 break;
13395 case BUILT_IN_ABORT:
13396 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13397 break;
13398 case BUILT_IN_FFS:
13399 if (INT_TYPE_SIZE < BITS_PER_WORD)
13401 set_user_assembler_libfunc ("ffs", asmspec);
13402 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13403 MODE_INT, 0), "ffs");
13405 break;
13406 default:
13407 break;
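/* Illustrative only, not part of builtins.c: the situation handled
   above arises when user code renames one of these builtins with
   GCC's asm-label extension, as freestanding code sometimes does.
   After a declaration like the one below, explicit memcpy calls and
   the block moves GCC emits on its own must both target the symbol
   "my_memcpy", which is what the BUILT_IN_MEMCPY case arranges via
   init_block_move_fn and memcpy_libfunc.  The name "my_memcpy" is
   made up; this is a declaration fragment, not a complete program.  */
#include <stddef.h>

void *memcpy (void *dest, const void *src, size_t n) asm ("my_memcpy");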
13411 /* Return true if DECL is a builtin that expands to a constant or similarly
13412 simple code. */
13413 bool
13414 is_simple_builtin (tree decl)
13416 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13417 switch (DECL_FUNCTION_CODE (decl))
13419 /* Builtins that expand to constants. */
13420 case BUILT_IN_CONSTANT_P:
13421 case BUILT_IN_EXPECT:
13422 case BUILT_IN_OBJECT_SIZE:
13423 case BUILT_IN_UNREACHABLE:
13424 /* Simple register moves or loads from stack. */
13425 case BUILT_IN_RETURN_ADDRESS:
13426 case BUILT_IN_EXTRACT_RETURN_ADDR:
13427 case BUILT_IN_FROB_RETURN_ADDR:
13428 case BUILT_IN_RETURN:
13429 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13430 case BUILT_IN_FRAME_ADDRESS:
13431 case BUILT_IN_VA_END:
13432 case BUILT_IN_STACK_SAVE:
13433 case BUILT_IN_STACK_RESTORE:
13434 /* Exception state returns or moves registers around. */
13435 case BUILT_IN_EH_FILTER:
13436 case BUILT_IN_EH_POINTER:
13437 case BUILT_IN_EH_COPY_VALUES:
13438 return true;
13440 default:
13441 return false;
13444 return false;
13447 /* Return true if DECL is a builtin that is not expensive, i.e., it will
13448 most probably be expanded inline into reasonably simple code. This is a
13449 superset of is_simple_builtin. */
13450 bool
13451 is_inexpensive_builtin (tree decl)
13453 if (!decl)
13454 return false;
13455 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13456 return true;
13457 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13458 switch (DECL_FUNCTION_CODE (decl))
13460 case BUILT_IN_ABS:
13461 case BUILT_IN_ALLOCA:
13462 case BUILT_IN_BSWAP32:
13463 case BUILT_IN_BSWAP64:
13464 case BUILT_IN_CLZ:
13465 case BUILT_IN_CLZIMAX:
13466 case BUILT_IN_CLZL:
13467 case BUILT_IN_CLZLL:
13468 case BUILT_IN_CTZ:
13469 case BUILT_IN_CTZIMAX:
13470 case BUILT_IN_CTZL:
13471 case BUILT_IN_CTZLL:
13472 case BUILT_IN_FFS:
13473 case BUILT_IN_FFSIMAX:
13474 case BUILT_IN_FFSL:
13475 case BUILT_IN_FFSLL:
13476 case BUILT_IN_IMAXABS:
13477 case BUILT_IN_FINITE:
13478 case BUILT_IN_FINITEF:
13479 case BUILT_IN_FINITEL:
13480 case BUILT_IN_FINITED32:
13481 case BUILT_IN_FINITED64:
13482 case BUILT_IN_FINITED128:
13483 case BUILT_IN_FPCLASSIFY:
13484 case BUILT_IN_ISFINITE:
13485 case BUILT_IN_ISINF_SIGN:
13486 case BUILT_IN_ISINF:
13487 case BUILT_IN_ISINFF:
13488 case BUILT_IN_ISINFL:
13489 case BUILT_IN_ISINFD32:
13490 case BUILT_IN_ISINFD64:
13491 case BUILT_IN_ISINFD128:
13492 case BUILT_IN_ISNAN:
13493 case BUILT_IN_ISNANF:
13494 case BUILT_IN_ISNANL:
13495 case BUILT_IN_ISNAND32:
13496 case BUILT_IN_ISNAND64:
13497 case BUILT_IN_ISNAND128:
13498 case BUILT_IN_ISNORMAL:
13499 case BUILT_IN_ISGREATER:
13500 case BUILT_IN_ISGREATEREQUAL:
13501 case BUILT_IN_ISLESS:
13502 case BUILT_IN_ISLESSEQUAL:
13503 case BUILT_IN_ISLESSGREATER:
13504 case BUILT_IN_ISUNORDERED:
13505 case BUILT_IN_VA_ARG_PACK:
13506 case BUILT_IN_VA_ARG_PACK_LEN:
13507 case BUILT_IN_VA_COPY:
13508 case BUILT_IN_TRAP:
13509 case BUILT_IN_SAVEREGS:
13510 case BUILT_IN_POPCOUNTL:
13511 case BUILT_IN_POPCOUNTLL:
13512 case BUILT_IN_POPCOUNTIMAX:
13513 case BUILT_IN_POPCOUNT:
13514 case BUILT_IN_PARITYL:
13515 case BUILT_IN_PARITYLL:
13516 case BUILT_IN_PARITYIMAX:
13517 case BUILT_IN_PARITY:
13518 case BUILT_IN_LABS:
13519 case BUILT_IN_LLABS:
13520 case BUILT_IN_PREFETCH:
13521 return true;
13523 default:
13524 return is_simple_builtin (decl);
13527 return false;