gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
74 #undef DEF_BUILTIN
 76 /* Set up an array of _DECL trees; make sure each element is
 77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
233 bool
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
237 return true;
238 if (strncmp (name, "__sync_", 7) == 0)
239 return true;
240 return false;
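/* A couple of concrete cases, for clarity: is_builtin_name ("__builtin_memcpy")
   and is_builtin_name ("__sync_fetch_and_add") return true, while
   is_builtin_name ("memcpy") returns false.  */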
244 /* Return true if DECL is a function symbol representing a built-in. */
246 bool
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
257 static bool
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
 267 /* Return the alignment in bits of EXP, an object, and set *BITPOSP to
 268 the bit offset of EXP modulo that alignment. */
270 unsigned int
271 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
273 HOST_WIDE_INT bitsize, bitpos;
274 tree offset;
275 enum machine_mode mode;
276 int unsignedp, volatilep;
277 unsigned int align, inner;
279 /* Get the innermost object and the constant (bitpos) and possibly
280 variable (offset) offset of the access. */
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 /* Extract alignment information from the innermost object and
285 possibly adjust bitpos and offset. */
286 if (TREE_CODE (exp) == CONST_DECL)
287 exp = DECL_INITIAL (exp);
288 if (DECL_P (exp)
289 && TREE_CODE (exp) != LABEL_DECL)
290 align = DECL_ALIGN (exp);
291 else if (CONSTANT_CLASS_P (exp))
293 align = TYPE_ALIGN (TREE_TYPE (exp));
294 #ifdef CONSTANT_ALIGNMENT
295 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
296 #endif
298 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
299 align = TYPE_ALIGN (TREE_TYPE (exp));
300 else if (TREE_CODE (exp) == INDIRECT_REF)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == MEM_REF)
304 tree addr = TREE_OPERAND (exp, 0);
305 struct ptr_info_def *pi;
306 if (TREE_CODE (addr) == BIT_AND_EXPR
307 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
309 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
310 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
311 align *= BITS_PER_UNIT;
312 addr = TREE_OPERAND (addr, 0);
314 else
315 align = BITS_PER_UNIT;
316 if (TREE_CODE (addr) == SSA_NAME
317 && (pi = SSA_NAME_PTR_INFO (addr)))
319 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
320 align = MAX (pi->align * BITS_PER_UNIT, align);
322 else if (TREE_CODE (addr) == ADDR_EXPR)
323 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
324 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
326 else if (TREE_CODE (exp) == TARGET_MEM_REF)
328 struct ptr_info_def *pi;
329 tree addr = TMR_BASE (exp);
330 if (TREE_CODE (addr) == BIT_AND_EXPR
331 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
333 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
334 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
335 align *= BITS_PER_UNIT;
336 addr = TREE_OPERAND (addr, 0);
338 else
339 align = BITS_PER_UNIT;
340 if (TREE_CODE (addr) == SSA_NAME
341 && (pi = SSA_NAME_PTR_INFO (addr)))
343 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
344 align = MAX (pi->align * BITS_PER_UNIT, align);
346 else if (TREE_CODE (addr) == ADDR_EXPR)
347 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
348 if (TMR_OFFSET (exp))
349 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
350 if (TMR_INDEX (exp) && TMR_STEP (exp))
352 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
353 align = MIN (align, (step & -step) * BITS_PER_UNIT);
355 else if (TMR_INDEX (exp))
356 align = BITS_PER_UNIT;
357 if (TMR_INDEX2 (exp))
358 align = BITS_PER_UNIT;
360 else
361 align = BITS_PER_UNIT;
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 inner = ~0U;
366 while (offset)
368 tree next_offset;
370 if (TREE_CODE (offset) == PLUS_EXPR)
372 next_offset = TREE_OPERAND (offset, 0);
373 offset = TREE_OPERAND (offset, 1);
375 else
376 next_offset = NULL;
377 if (host_integerp (offset, 1))
379 /* Any overflow in calculating offset_bits won't change
380 the alignment. */
381 unsigned offset_bits
382 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
384 if (offset_bits)
385 inner = MIN (inner, (offset_bits & -offset_bits));
387 else if (TREE_CODE (offset) == MULT_EXPR
388 && host_integerp (TREE_OPERAND (offset, 1), 1))
390 /* Any overflow in calculating offset_factor won't change
391 the alignment. */
392 unsigned offset_factor
393 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
394 * BITS_PER_UNIT);
396 if (offset_factor)
397 inner = MIN (inner, (offset_factor & -offset_factor));
399 else
401 inner = MIN (inner, BITS_PER_UNIT);
402 break;
404 offset = next_offset;
407 /* Alignment is innermost object alignment adjusted by the constant
408 and non-constant offset parts. */
409 align = MIN (align, inner);
410 bitpos = bitpos & (align - 1);
412 *bitposp = bitpos;
413 return align;
416 /* Return the alignment in bits of EXP, an object.
417 Don't return more than MAX_ALIGN no matter what. */
419 unsigned int
420 get_object_alignment (tree exp, unsigned int max_align)
422 unsigned HOST_WIDE_INT bitpos = 0;
423 unsigned int align;
425 align = get_object_alignment_1 (exp, &bitpos);
427 /* align and bitpos now specify known low bits of the pointer.
428 ptr & (align - 1) == bitpos. */
430 if (bitpos != 0)
431 align = (bitpos & -bitpos);
433 return MIN (align, max_align);
436 /* Returns true iff we can trust that alignment information has been
437 calculated properly. */
439 bool
440 can_trust_pointer_alignment (void)
442 /* We rely on TER to compute accurate alignment information. */
443 return (optimize && flag_tree_ter);
446 /* Return the alignment in bits of EXP, a pointer valued expression.
447 But don't return more than MAX_ALIGN no matter what.
448 The alignment returned is, by default, the alignment of the thing that
449 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
451 Otherwise, look at the expression to see if we can do better, i.e., if the
452 expression is actually pointing at an object whose alignment is tighter. */
454 unsigned int
455 get_pointer_alignment (tree exp, unsigned int max_align)
457 STRIP_NOPS (exp);
459 if (TREE_CODE (exp) == ADDR_EXPR)
460 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
461 else if (TREE_CODE (exp) == SSA_NAME
462 && POINTER_TYPE_P (TREE_TYPE (exp)))
464 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
465 unsigned align;
466 if (!pi)
467 return BITS_PER_UNIT;
468 if (pi->misalign != 0)
469 align = (pi->misalign & -pi->misalign);
470 else
471 align = pi->align;
472 return MIN (max_align, align * BITS_PER_UNIT);
475 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
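/* Worked example: for EXP == &buf[0], where buf is declared
   "char buf[64] __attribute__ ((aligned (16)))", the ADDR_EXPR case above
   yields 128 bits (subject to MAX_ALIGN); for a plain "char *" SSA name with
   no recorded pointer-alignment info, the result is BITS_PER_UNIT.  */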
478 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
479 way, because it could contain a zero byte in the middle.
480 TREE_STRING_LENGTH is the size of the character array, not the string.
482 ONLY_VALUE should be nonzero if the result is not going to be emitted
483 into the instruction stream and zero if it is going to be expanded.
484 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
485 is returned, otherwise NULL, since
486 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
487 evaluate the side-effects.
489 The value returned is of type `ssizetype'.
491 Unfortunately, string_constant can't access the values of const char
492 arrays with initializers, so neither can we do so here. */
494 tree
495 c_strlen (tree src, int only_value)
497 tree offset_node;
498 HOST_WIDE_INT offset;
499 int max;
500 const char *ptr;
501 location_t loc;
503 STRIP_NOPS (src);
504 if (TREE_CODE (src) == COND_EXPR
505 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
507 tree len1, len2;
509 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
510 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
511 if (tree_int_cst_equal (len1, len2))
512 return len1;
515 if (TREE_CODE (src) == COMPOUND_EXPR
516 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
517 return c_strlen (TREE_OPERAND (src, 1), only_value);
519 loc = EXPR_LOC_OR_HERE (src);
521 src = string_constant (src, &offset_node);
522 if (src == 0)
523 return NULL_TREE;
525 max = TREE_STRING_LENGTH (src) - 1;
526 ptr = TREE_STRING_POINTER (src);
528 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
530 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
531 compute the offset to the following null if we don't know where to
532 start searching for it. */
533 int i;
535 for (i = 0; i < max; i++)
536 if (ptr[i] == 0)
537 return NULL_TREE;
539 /* We don't know the starting offset, but we do know that the string
540 has no internal zero bytes. We can assume that the offset falls
541 within the bounds of the string; otherwise, the programmer deserves
542 what he gets. Subtract the offset from the length of the string,
543 and return that. This would perhaps not be valid if we were dealing
544 with named arrays in addition to literal string constants. */
546 return size_diffop_loc (loc, size_int (max), offset_node);
549 /* We have a known offset into the string. Start searching there for
550 a null character if we can represent it as a single HOST_WIDE_INT. */
551 if (offset_node == 0)
552 offset = 0;
553 else if (! host_integerp (offset_node, 0))
554 offset = -1;
555 else
556 offset = tree_low_cst (offset_node, 0);
558 /* If the offset is known to be out of bounds, warn, and call strlen at
559 runtime. */
560 if (offset < 0 || offset > max)
562 /* Suppress multiple warnings for propagated constant strings. */
563 if (! TREE_NO_WARNING (src))
565 warning_at (loc, 0, "offset outside bounds of constant string");
566 TREE_NO_WARNING (src) = 1;
568 return NULL_TREE;
571 /* Use strlen to search for the first zero byte. Since any strings
572 constructed with build_string will have nulls appended, we win even
573 if we get handed something like (char[4])"abcd".
575 Since OFFSET is our starting index into the string, no further
576 calculation is needed. */
577 return ssize_int (strlen (ptr + offset));
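/* Worked example: for the string constant "foo\0bar" with a known offset of 4,
   the result is ssize_int (3); with an unknown offset, the embedded NUL makes
   this function return NULL_TREE instead.  */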
580 /* Return a char pointer for a C string if it is a string constant
581 or sum of string constant and integer constant. */
583 static const char *
584 c_getstr (tree src)
586 tree offset_node;
588 src = string_constant (src, &offset_node);
589 if (src == 0)
590 return 0;
592 if (offset_node == 0)
593 return TREE_STRING_POINTER (src);
594 else if (!host_integerp (offset_node, 1)
595 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
596 return 0;
598 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
601 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
602 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
604 static rtx
605 c_readstr (const char *str, enum machine_mode mode)
607 HOST_WIDE_INT c[2];
608 HOST_WIDE_INT ch;
609 unsigned int i, j;
611 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
613 c[0] = 0;
614 c[1] = 0;
615 ch = 1;
616 for (i = 0; i < GET_MODE_SIZE (mode); i++)
618 j = i;
619 if (WORDS_BIG_ENDIAN)
620 j = GET_MODE_SIZE (mode) - i - 1;
621 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
622 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
623 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
624 j *= BITS_PER_UNIT;
625 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
627 if (ch)
628 ch = (unsigned char) str[i];
629 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
631 return immed_double_const (c[0], c[1], mode);
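/* Worked example: on a little-endian target, c_readstr ("abcd", SImode)
   produces the constant 0x64636261, i.e. the four bytes laid out exactly as
   the target would read them from memory.  */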
634 /* Cast a target constant CST to target CHAR and if that value fits into
635 host char type, return zero and put that value into variable pointed to by
636 P. */
638 static int
639 target_char_cast (tree cst, char *p)
641 unsigned HOST_WIDE_INT val, hostval;
643 if (TREE_CODE (cst) != INTEGER_CST
644 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
645 return 1;
647 val = TREE_INT_CST_LOW (cst);
648 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
649 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
651 hostval = val;
652 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
653 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
655 if (val != hostval)
656 return 1;
658 *p = hostval;
659 return 0;
662 /* Similar to save_expr, but assumes that arbitrary code is not executed
663 in between the multiple evaluations. In particular, we assume that a
664 non-addressable local variable will not be modified. */
666 static tree
667 builtin_save_expr (tree exp)
669 if (TREE_CODE (exp) == SSA_NAME
670 || (TREE_ADDRESSABLE (exp) == 0
671 && (TREE_CODE (exp) == PARM_DECL
672 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
673 return exp;
675 return save_expr (exp);
678 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
679 times to get the address of either a higher stack frame, or a return
680 address located within it (depending on FNDECL_CODE). */
682 static rtx
683 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
685 int i;
687 #ifdef INITIAL_FRAME_ADDRESS_RTX
688 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
689 #else
690 rtx tem;
692 /* For a zero count with __builtin_return_address, we don't care what
693 frame address we return, because target-specific definitions will
694 override us. Therefore frame pointer elimination is OK, and using
695 the soft frame pointer is OK.
697 For a nonzero count, or a zero count with __builtin_frame_address,
698 we require a stable offset from the current frame pointer to the
699 previous one, so we must use the hard frame pointer, and
700 we must disable frame pointer elimination. */
701 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
702 tem = frame_pointer_rtx;
703 else
705 tem = hard_frame_pointer_rtx;
707 /* Tell reload not to eliminate the frame pointer. */
708 crtl->accesses_prior_frames = 1;
710 #endif
712 /* Some machines need special handling before we can access
713 arbitrary frames. For example, on the SPARC, we must first flush
714 all register windows to the stack. */
715 #ifdef SETUP_FRAME_ADDRESSES
716 if (count > 0)
717 SETUP_FRAME_ADDRESSES ();
718 #endif
720 /* On the SPARC, the return address is not in the frame, it is in a
721 register. There is no way to access it off of the current frame
722 pointer, but it can be accessed off the previous frame pointer by
723 reading the value from the register window save area. */
724 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
725 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
726 count--;
727 #endif
729 /* Scan back COUNT frames to the specified frame. */
730 for (i = 0; i < count; i++)
732 /* Assume the dynamic chain pointer is in the word that the
733 frame address points to, unless otherwise specified. */
734 #ifdef DYNAMIC_CHAIN_ADDRESS
735 tem = DYNAMIC_CHAIN_ADDRESS (tem);
736 #endif
737 tem = memory_address (Pmode, tem);
738 tem = gen_frame_mem (Pmode, tem);
739 tem = copy_to_reg (tem);
742 /* For __builtin_frame_address, return what we've got. But, on
743 the SPARC for example, we may have to add a bias. */
744 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
745 #ifdef FRAME_ADDR_RTX
746 return FRAME_ADDR_RTX (tem);
747 #else
748 return tem;
749 #endif
751 /* For __builtin_return_address, get the return address from that frame. */
752 #ifdef RETURN_ADDR_RTX
753 tem = RETURN_ADDR_RTX (count, tem);
754 #else
755 tem = memory_address (Pmode,
756 plus_constant (tem, GET_MODE_SIZE (Pmode)));
757 tem = gen_frame_mem (Pmode, tem);
758 #endif
759 return tem;
762 /* Alias set used for setjmp buffer. */
763 static alias_set_type setjmp_alias_set = -1;
765 /* Construct the leading half of a __builtin_setjmp call. Control will
766 return to RECEIVER_LABEL. This is also called directly by the SJLJ
767 exception handling code. */
769 void
770 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
772 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
773 rtx stack_save;
774 rtx mem;
776 if (setjmp_alias_set == -1)
777 setjmp_alias_set = new_alias_set ();
779 buf_addr = convert_memory_address (Pmode, buf_addr);
781 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
783 /* We store the frame pointer and the address of receiver_label in
784 the buffer and use the rest of it for the stack save area, which
785 is machine-dependent. */
787 mem = gen_rtx_MEM (Pmode, buf_addr);
788 set_mem_alias_set (mem, setjmp_alias_set);
789 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
791 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
792 set_mem_alias_set (mem, setjmp_alias_set);
794 emit_move_insn (validize_mem (mem),
795 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
797 stack_save = gen_rtx_MEM (sa_mode,
798 plus_constant (buf_addr,
799 2 * GET_MODE_SIZE (Pmode)));
800 set_mem_alias_set (stack_save, setjmp_alias_set);
801 emit_stack_save (SAVE_NONLOCAL, &stack_save);
803 /* If there is further processing to do, do it. */
804 #ifdef HAVE_builtin_setjmp_setup
805 if (HAVE_builtin_setjmp_setup)
806 emit_insn (gen_builtin_setjmp_setup (buf_addr));
807 #endif
809 /* We have a nonlocal label. */
810 cfun->has_nonlocal_label = 1;
813 /* Construct the trailing part of a __builtin_setjmp call. This is
814 also called directly by the SJLJ exception handling code. */
816 void
817 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
819 rtx chain;
821 /* Clobber the FP when we get here, so we have to make sure it's
822 marked as used by this function. */
823 emit_use (hard_frame_pointer_rtx);
825 /* Mark the static chain as clobbered here so life information
826 doesn't get messed up for it. */
827 chain = targetm.calls.static_chain (current_function_decl, true);
828 if (chain && REG_P (chain))
829 emit_clobber (chain);
831 /* Now put in the code to restore the frame pointer, and argument
832 pointer, if needed. */
833 #ifdef HAVE_nonlocal_goto
834 if (! HAVE_nonlocal_goto)
835 #endif
837 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
838 /* This might change the hard frame pointer in ways that aren't
839 apparent to early optimization passes, so force a clobber. */
840 emit_clobber (hard_frame_pointer_rtx);
843 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
844 if (fixed_regs[ARG_POINTER_REGNUM])
846 #ifdef ELIMINABLE_REGS
847 size_t i;
848 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
850 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
851 if (elim_regs[i].from == ARG_POINTER_REGNUM
852 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
853 break;
855 if (i == ARRAY_SIZE (elim_regs))
856 #endif
858 /* Now restore our arg pointer from the address at which it
859 was saved in our stack frame. */
860 emit_move_insn (crtl->args.internal_arg_pointer,
861 copy_to_reg (get_arg_pointer_save_area ()));
864 #endif
866 #ifdef HAVE_builtin_setjmp_receiver
867 if (HAVE_builtin_setjmp_receiver)
868 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
869 else
870 #endif
871 #ifdef HAVE_nonlocal_goto_receiver
872 if (HAVE_nonlocal_goto_receiver)
873 emit_insn (gen_nonlocal_goto_receiver ());
874 else
875 #endif
876 { /* Nothing */ }
878 /* We must not allow the code we just generated to be reordered by
879 scheduling. Specifically, the update of the frame pointer must
880 happen immediately, not later. */
881 emit_insn (gen_blockage ());
884 /* __builtin_longjmp is passed a pointer to an array of five words (not
885 all will be used on all machines). It operates similarly to the C
886 library function of the same name, but is more efficient. Much of
887 the code below is copied from the handling of non-local gotos. */
889 static void
890 expand_builtin_longjmp (rtx buf_addr, rtx value)
892 rtx fp, lab, stack, insn, last;
893 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
895 /* DRAP is needed for stack realign if longjmp is expanded to current
896 function */
897 if (SUPPORTS_STACK_ALIGNMENT)
898 crtl->need_drap = true;
900 if (setjmp_alias_set == -1)
901 setjmp_alias_set = new_alias_set ();
903 buf_addr = convert_memory_address (Pmode, buf_addr);
905 buf_addr = force_reg (Pmode, buf_addr);
907 /* We require that the user must pass a second argument of 1, because
908 that is what builtin_setjmp will return. */
909 gcc_assert (value == const1_rtx);
911 last = get_last_insn ();
912 #ifdef HAVE_builtin_longjmp
913 if (HAVE_builtin_longjmp)
914 emit_insn (gen_builtin_longjmp (buf_addr));
915 else
916 #endif
918 fp = gen_rtx_MEM (Pmode, buf_addr);
919 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
920 GET_MODE_SIZE (Pmode)));
922 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
923 2 * GET_MODE_SIZE (Pmode)));
924 set_mem_alias_set (fp, setjmp_alias_set);
925 set_mem_alias_set (lab, setjmp_alias_set);
926 set_mem_alias_set (stack, setjmp_alias_set);
928 /* Pick up FP, label, and SP from the block and jump. This code is
929 from expand_goto in stmt.c; see there for detailed comments. */
930 #ifdef HAVE_nonlocal_goto
931 if (HAVE_nonlocal_goto)
932 /* We have to pass a value to the nonlocal_goto pattern that will
933 get copied into the static_chain pointer, but it does not matter
934 what that value is, because builtin_setjmp does not use it. */
935 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
936 else
937 #endif
939 lab = copy_to_reg (lab);
941 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
942 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
944 emit_move_insn (hard_frame_pointer_rtx, fp);
945 emit_stack_restore (SAVE_NONLOCAL, stack);
947 emit_use (hard_frame_pointer_rtx);
948 emit_use (stack_pointer_rtx);
949 emit_indirect_jump (lab);
953 /* Search backwards and mark the jump insn as a non-local goto.
954 Note that this precludes the use of __builtin_longjmp to a
955 __builtin_setjmp target in the same function. However, we've
956 already cautioned the user that these functions are for
957 internal exception handling use only. */
958 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
960 gcc_assert (insn != last);
962 if (JUMP_P (insn))
964 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
965 break;
967 else if (CALL_P (insn))
968 break;
972 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
973 and the address of the save area. */
975 static rtx
976 expand_builtin_nonlocal_goto (tree exp)
978 tree t_label, t_save_area;
979 rtx r_label, r_save_area, r_fp, r_sp, insn;
981 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
982 return NULL_RTX;
984 t_label = CALL_EXPR_ARG (exp, 0);
985 t_save_area = CALL_EXPR_ARG (exp, 1);
987 r_label = expand_normal (t_label);
988 r_label = convert_memory_address (Pmode, r_label);
989 r_save_area = expand_normal (t_save_area);
990 r_save_area = convert_memory_address (Pmode, r_save_area);
991 /* Copy the address of the save location to a register just in case it was
992 based on the frame pointer. */
993 r_save_area = copy_to_reg (r_save_area);
994 r_fp = gen_rtx_MEM (Pmode, r_save_area);
995 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
996 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
998 crtl->has_nonlocal_goto = 1;
1000 #ifdef HAVE_nonlocal_goto
1001 /* ??? We no longer need to pass the static chain value, afaik. */
1002 if (HAVE_nonlocal_goto)
1003 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1004 else
1005 #endif
1007 r_label = copy_to_reg (r_label);
1009 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1010 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1012 /* Restore frame pointer for containing function. */
1013 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1014 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1016 /* USE of hard_frame_pointer_rtx added for consistency;
1017 not clear if really needed. */
1018 emit_use (hard_frame_pointer_rtx);
1019 emit_use (stack_pointer_rtx);
1021 /* If the architecture is using a GP register, we must
1022 conservatively assume that the target function makes use of it.
1023 The prologue of functions with nonlocal gotos must therefore
1024 initialize the GP register to the appropriate value, and we
1025 must then make sure that this value is live at the point
1026 of the jump. (Note that this doesn't necessarily apply
1027 to targets with a nonlocal_goto pattern; they are free
1028 to implement it in their own way. Note also that this is
1029 a no-op if the GP register is a global invariant.) */
1030 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1031 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1032 emit_use (pic_offset_table_rtx);
1034 emit_indirect_jump (r_label);
1037 /* Search backwards to the jump insn and mark it as a
1038 non-local goto. */
1039 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1041 if (JUMP_P (insn))
1043 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1044 break;
1046 else if (CALL_P (insn))
1047 break;
1050 return const0_rtx;
1053 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1054 (not all will be used on all machines) that was passed to __builtin_setjmp.
1055 It updates the stack pointer in that block to correspond to the current
1056 stack pointer. */
1058 static void
1059 expand_builtin_update_setjmp_buf (rtx buf_addr)
1061 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1062 rtx stack_save
1063 = gen_rtx_MEM (sa_mode,
1064 memory_address
1065 (sa_mode,
1066 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1068 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1071 /* Expand a call to __builtin_prefetch. For a target that does not support
1072 data prefetch, evaluate the memory address argument in case it has side
1073 effects. */
1075 static void
1076 expand_builtin_prefetch (tree exp)
1078 tree arg0, arg1, arg2;
1079 int nargs;
1080 rtx op0, op1, op2;
1082 if (!validate_arglist (exp, POINTER_TYPE, 0))
1083 return;
1085 arg0 = CALL_EXPR_ARG (exp, 0);
1087 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1088 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1089 locality). */
1090 nargs = call_expr_nargs (exp);
1091 if (nargs > 1)
1092 arg1 = CALL_EXPR_ARG (exp, 1);
1093 else
1094 arg1 = integer_zero_node;
1095 if (nargs > 2)
1096 arg2 = CALL_EXPR_ARG (exp, 2);
1097 else
1098 arg2 = integer_three_node;
1100 /* Argument 0 is an address. */
1101 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1103 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1104 if (TREE_CODE (arg1) != INTEGER_CST)
1106 error ("second argument to %<__builtin_prefetch%> must be a constant");
1107 arg1 = integer_zero_node;
1109 op1 = expand_normal (arg1);
1110 /* Argument 1 must be either zero or one. */
1111 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1113 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1114 " using zero");
1115 op1 = const0_rtx;
1118 /* Argument 2 (locality) must be a compile-time constant int. */
1119 if (TREE_CODE (arg2) != INTEGER_CST)
1121 error ("third argument to %<__builtin_prefetch%> must be a constant");
1122 arg2 = integer_zero_node;
1124 op2 = expand_normal (arg2);
1125 /* Argument 2 must be 0, 1, 2, or 3. */
1126 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1128 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1129 op2 = const0_rtx;
1132 #ifdef HAVE_prefetch
1133 if (HAVE_prefetch)
1135 struct expand_operand ops[3];
1137 create_address_operand (&ops[0], op0);
1138 create_integer_operand (&ops[1], INTVAL (op1));
1139 create_integer_operand (&ops[2], INTVAL (op2));
1140 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1141 return;
1143 #endif
1145 /* Don't do anything with direct references to volatile memory, but
1146 generate code to handle other side effects. */
1147 if (!MEM_P (op0) && side_effects_p (op0))
1148 emit_insn (op0);
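/* Usage note: __builtin_prefetch (p) behaves like __builtin_prefetch (p, 0, 3),
   a read prefetch with maximal temporal locality, whereas
   __builtin_prefetch (p, 1, 0) requests a write prefetch with no temporal
   locality.  */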
1151 /* Get a MEM rtx for expression EXP which is the address of an operand
1152 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1153 the maximum length of the block of memory that might be accessed or
1154 NULL if unknown. */
1156 static rtx
1157 get_memory_rtx (tree exp, tree len)
1159 tree orig_exp = exp;
1160 rtx addr, mem;
1161 HOST_WIDE_INT off;
 1163 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
 1164 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1165 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1166 exp = TREE_OPERAND (exp, 0);
1168 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1169 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1171 /* Get an expression we can use to find the attributes to assign to MEM.
1172 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1173 we can. First remove any nops. */
1174 while (CONVERT_EXPR_P (exp)
1175 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1176 exp = TREE_OPERAND (exp, 0);
1178 off = 0;
1179 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1180 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1181 && host_integerp (TREE_OPERAND (exp, 1), 0)
1182 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1183 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1184 else if (TREE_CODE (exp) == ADDR_EXPR)
1185 exp = TREE_OPERAND (exp, 0);
1186 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1187 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1188 else
1189 exp = NULL;
1191 /* Honor attributes derived from exp, except for the alias set
1192 (as builtin stringops may alias with anything) and the size
1193 (as stringops may access multiple array elements). */
1194 if (exp)
1196 set_mem_attributes (mem, exp, 0);
1198 if (off)
1199 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1201 /* Allow the string and memory builtins to overflow from one
1202 field into another, see http://gcc.gnu.org/PR23561.
1203 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1204 memory accessed by the string or memory builtin will fit
1205 within the field. */
1206 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1208 tree mem_expr = MEM_EXPR (mem);
1209 HOST_WIDE_INT offset = -1, length = -1;
1210 tree inner = exp;
1212 while (TREE_CODE (inner) == ARRAY_REF
1213 || CONVERT_EXPR_P (inner)
1214 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1215 || TREE_CODE (inner) == SAVE_EXPR)
1216 inner = TREE_OPERAND (inner, 0);
1218 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1220 if (MEM_OFFSET (mem)
1221 && CONST_INT_P (MEM_OFFSET (mem)))
1222 offset = INTVAL (MEM_OFFSET (mem));
1224 if (offset >= 0 && len && host_integerp (len, 0))
1225 length = tree_low_cst (len, 0);
1227 while (TREE_CODE (inner) == COMPONENT_REF)
1229 tree field = TREE_OPERAND (inner, 1);
1230 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1231 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1233 /* Bitfields are generally not byte-addressable. */
1234 gcc_assert (!DECL_BIT_FIELD (field)
1235 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1236 % BITS_PER_UNIT) == 0
1237 && host_integerp (DECL_SIZE (field), 0)
1238 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1239 % BITS_PER_UNIT) == 0));
1241 /* If we can prove that the memory starting at XEXP (mem, 0) and
1242 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1243 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1244 fields without DECL_SIZE_UNIT like flexible array members. */
1245 if (length >= 0
1246 && DECL_SIZE_UNIT (field)
1247 && host_integerp (DECL_SIZE_UNIT (field), 0))
1249 HOST_WIDE_INT size
1250 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1251 if (offset <= size
1252 && length <= size
1253 && offset + length <= size)
1254 break;
1257 if (offset >= 0
1258 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1259 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1260 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1261 / BITS_PER_UNIT;
1262 else
1264 offset = -1;
1265 length = -1;
1268 mem_expr = TREE_OPERAND (mem_expr, 0);
1269 inner = TREE_OPERAND (inner, 0);
1272 if (mem_expr == NULL)
1273 offset = -1;
1274 if (mem_expr != MEM_EXPR (mem))
1276 set_mem_expr (mem, mem_expr);
1277 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1280 set_mem_alias_set (mem, 0);
1281 set_mem_size (mem, NULL_RTX);
1284 return mem;
1287 /* Built-in functions to perform an untyped call and return. */
1289 #define apply_args_mode \
1290 (this_target_builtins->x_apply_args_mode)
1291 #define apply_result_mode \
1292 (this_target_builtins->x_apply_result_mode)
1294 /* Return the size required for the block returned by __builtin_apply_args,
1295 and initialize apply_args_mode. */
1297 static int
1298 apply_args_size (void)
1300 static int size = -1;
1301 int align;
1302 unsigned int regno;
1303 enum machine_mode mode;
1305 /* The values computed by this function never change. */
1306 if (size < 0)
1308 /* The first value is the incoming arg-pointer. */
1309 size = GET_MODE_SIZE (Pmode);
1311 /* The second value is the structure value address unless this is
1312 passed as an "invisible" first argument. */
1313 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1314 size += GET_MODE_SIZE (Pmode);
1316 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1317 if (FUNCTION_ARG_REGNO_P (regno))
1319 mode = targetm.calls.get_raw_arg_mode (regno);
1321 gcc_assert (mode != VOIDmode);
1323 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1324 if (size % align != 0)
1325 size = CEIL (size, align) * align;
1326 size += GET_MODE_SIZE (mode);
1327 apply_args_mode[regno] = mode;
1329 else
1331 apply_args_mode[regno] = VOIDmode;
1334 return size;
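/* The resulting block layout is: the incoming arg pointer, then (unless it is
   passed as an invisible first argument) the structure value address, then
   each argument register in turn, with each entry padded to the alignment of
   its mode.  */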
1337 /* Return the size required for the block returned by __builtin_apply,
1338 and initialize apply_result_mode. */
1340 static int
1341 apply_result_size (void)
1343 static int size = -1;
1344 int align, regno;
1345 enum machine_mode mode;
1347 /* The values computed by this function never change. */
1348 if (size < 0)
1350 size = 0;
1352 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1353 if (targetm.calls.function_value_regno_p (regno))
1355 mode = targetm.calls.get_raw_result_mode (regno);
1357 gcc_assert (mode != VOIDmode);
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
1362 size += GET_MODE_SIZE (mode);
1363 apply_result_mode[regno] = mode;
1365 else
1366 apply_result_mode[regno] = VOIDmode;
1368 /* Allow targets that use untyped_call and untyped_return to override
1369 the size so that machine-specific information can be stored here. */
1370 #ifdef APPLY_RESULT_SIZE
1371 size = APPLY_RESULT_SIZE;
1372 #endif
1374 return size;
1377 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1378 /* Create a vector describing the result block RESULT. If SAVEP is true,
1379 the result block is used to save the values; otherwise it is used to
1380 restore the values. */
1382 static rtx
1383 result_vector (int savep, rtx result)
1385 int regno, size, align, nelts;
1386 enum machine_mode mode;
1387 rtx reg, mem;
1388 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1390 size = nelts = 0;
1391 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1392 if ((mode = apply_result_mode[regno]) != VOIDmode)
1394 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1395 if (size % align != 0)
1396 size = CEIL (size, align) * align;
1397 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1398 mem = adjust_address (result, mode, size);
1399 savevec[nelts++] = (savep
1400 ? gen_rtx_SET (VOIDmode, mem, reg)
1401 : gen_rtx_SET (VOIDmode, reg, mem));
1402 size += GET_MODE_SIZE (mode);
1404 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1406 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1408 /* Save the state required to perform an untyped call with the same
1409 arguments as were passed to the current function. */
1411 static rtx
1412 expand_builtin_apply_args_1 (void)
1414 rtx registers, tem;
1415 int size, align, regno;
1416 enum machine_mode mode;
1417 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1419 /* Create a block where the arg-pointer, structure value address,
1420 and argument registers can be saved. */
1421 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1423 /* Walk past the arg-pointer and structure value address. */
1424 size = GET_MODE_SIZE (Pmode);
1425 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1426 size += GET_MODE_SIZE (Pmode);
1428 /* Save each register used in calling a function to the block. */
1429 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1430 if ((mode = apply_args_mode[regno]) != VOIDmode)
1432 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1433 if (size % align != 0)
1434 size = CEIL (size, align) * align;
1436 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1438 emit_move_insn (adjust_address (registers, mode, size), tem);
1439 size += GET_MODE_SIZE (mode);
1442 /* Save the arg pointer to the block. */
1443 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1444 #ifdef STACK_GROWS_DOWNWARD
 1445 /* We need the pointer as the caller actually passed it to us, not
 1446 as we might have pretended it was passed. Make sure it's a valid
 1447 operand, as emit_move_insn isn't expected to handle a PLUS. */
 1448 tem
 1449 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1450 NULL_RTX);
1451 #endif
1452 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1454 size = GET_MODE_SIZE (Pmode);
1456 /* Save the structure value address unless this is passed as an
1457 "invisible" first argument. */
1458 if (struct_incoming_value)
1460 emit_move_insn (adjust_address (registers, Pmode, size),
1461 copy_to_reg (struct_incoming_value));
1462 size += GET_MODE_SIZE (Pmode);
1465 /* Return the address of the block. */
1466 return copy_addr_to_reg (XEXP (registers, 0));
1469 /* __builtin_apply_args returns block of memory allocated on
1470 the stack into which is stored the arg pointer, structure
1471 value address, static chain, and all the registers that might
1472 possibly be used in performing a function call. The code is
1473 moved to the start of the function so the incoming values are
1474 saved. */
1476 static rtx
1477 expand_builtin_apply_args (void)
1479 /* Don't do __builtin_apply_args more than once in a function.
1480 Save the result of the first call and reuse it. */
1481 if (apply_args_value != 0)
1482 return apply_args_value;
1484 /* When this function is called, it means that registers must be
1485 saved on entry to this function. So we migrate the
1486 call to the first insn of this function. */
1487 rtx temp;
1488 rtx seq;
1490 start_sequence ();
1491 temp = expand_builtin_apply_args_1 ();
1492 seq = get_insns ();
1493 end_sequence ();
1495 apply_args_value = temp;
1497 /* Put the insns after the NOTE that starts the function.
1498 If this is inside a start_sequence, make the outer-level insn
1499 chain current, so the code is placed at the start of the
1500 function. If internal_arg_pointer is a non-virtual pseudo,
1501 it needs to be placed after the function that initializes
1502 that pseudo. */
1503 push_topmost_sequence ();
1504 if (REG_P (crtl->args.internal_arg_pointer)
1505 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1506 emit_insn_before (seq, parm_birth_insn);
1507 else
1508 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1509 pop_topmost_sequence ();
1510 return temp;
1514 /* Perform an untyped call and save the state required to perform an
1515 untyped return of whatever value was returned by the given function. */
1517 static rtx
1518 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1520 int size, align, regno;
1521 enum machine_mode mode;
1522 rtx incoming_args, result, reg, dest, src, call_insn;
1523 rtx old_stack_level = 0;
1524 rtx call_fusage = 0;
1525 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1527 arguments = convert_memory_address (Pmode, arguments);
1529 /* Create a block where the return registers can be saved. */
1530 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1532 /* Fetch the arg pointer from the ARGUMENTS block. */
1533 incoming_args = gen_reg_rtx (Pmode);
1534 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1535 #ifndef STACK_GROWS_DOWNWARD
1536 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1537 incoming_args, 0, OPTAB_LIB_WIDEN);
1538 #endif
1540 /* Push a new argument block and copy the arguments. Do not allow
1541 the (potential) memcpy call below to interfere with our stack
1542 manipulations. */
1543 do_pending_stack_adjust ();
1544 NO_DEFER_POP;
1546 /* Save the stack with nonlocal if available. */
1547 #ifdef HAVE_save_stack_nonlocal
1548 if (HAVE_save_stack_nonlocal)
1549 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1550 else
1551 #endif
1552 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1554 /* Allocate a block of memory onto the stack and copy the memory
1555 arguments to the outgoing arguments address. We can pass TRUE
1556 as the 4th argument because we just saved the stack pointer
1557 and will restore it right after the call. */
1558 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1560 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1561 may have already set current_function_calls_alloca to true.
1562 current_function_calls_alloca won't be set if argsize is zero,
1563 so we have to guarantee need_drap is true here. */
1564 if (SUPPORTS_STACK_ALIGNMENT)
1565 crtl->need_drap = true;
1567 dest = virtual_outgoing_args_rtx;
1568 #ifndef STACK_GROWS_DOWNWARD
1569 if (CONST_INT_P (argsize))
1570 dest = plus_constant (dest, -INTVAL (argsize));
1571 else
1572 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1573 #endif
1574 dest = gen_rtx_MEM (BLKmode, dest);
1575 set_mem_align (dest, PARM_BOUNDARY);
1576 src = gen_rtx_MEM (BLKmode, incoming_args);
1577 set_mem_align (src, PARM_BOUNDARY);
1578 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1580 /* Refer to the argument block. */
1581 apply_args_size ();
1582 arguments = gen_rtx_MEM (BLKmode, arguments);
1583 set_mem_align (arguments, PARM_BOUNDARY);
1585 /* Walk past the arg-pointer and structure value address. */
1586 size = GET_MODE_SIZE (Pmode);
1587 if (struct_value)
1588 size += GET_MODE_SIZE (Pmode);
1590 /* Restore each of the registers previously saved. Make USE insns
1591 for each of these registers for use in making the call. */
1592 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1593 if ((mode = apply_args_mode[regno]) != VOIDmode)
1595 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1596 if (size % align != 0)
1597 size = CEIL (size, align) * align;
1598 reg = gen_rtx_REG (mode, regno);
1599 emit_move_insn (reg, adjust_address (arguments, mode, size));
1600 use_reg (&call_fusage, reg);
1601 size += GET_MODE_SIZE (mode);
1604 /* Restore the structure value address unless this is passed as an
1605 "invisible" first argument. */
1606 size = GET_MODE_SIZE (Pmode);
1607 if (struct_value)
1609 rtx value = gen_reg_rtx (Pmode);
1610 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1611 emit_move_insn (struct_value, value);
1612 if (REG_P (struct_value))
1613 use_reg (&call_fusage, struct_value);
1614 size += GET_MODE_SIZE (Pmode);
1617 /* All arguments and registers used for the call are set up by now! */
1618 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1620 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1621 and we don't want to load it into a register as an optimization,
1622 because prepare_call_address already did it if it should be done. */
1623 if (GET_CODE (function) != SYMBOL_REF)
1624 function = memory_address (FUNCTION_MODE, function);
1626 /* Generate the actual call instruction and save the return value. */
1627 #ifdef HAVE_untyped_call
1628 if (HAVE_untyped_call)
1629 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1630 result, result_vector (1, result)));
1631 else
1632 #endif
1633 #ifdef HAVE_call_value
1634 if (HAVE_call_value)
1636 rtx valreg = 0;
1638 /* Locate the unique return register. It is not possible to
1639 express a call that sets more than one return register using
1640 call_value; use untyped_call for that. In fact, untyped_call
1641 only needs to save the return registers in the given block. */
1642 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1643 if ((mode = apply_result_mode[regno]) != VOIDmode)
1645 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1647 valreg = gen_rtx_REG (mode, regno);
1650 emit_call_insn (GEN_CALL_VALUE (valreg,
1651 gen_rtx_MEM (FUNCTION_MODE, function),
1652 const0_rtx, NULL_RTX, const0_rtx));
1654 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1656 else
1657 #endif
1658 gcc_unreachable ();
1660 /* Find the CALL insn we just emitted, and attach the register usage
1661 information. */
1662 call_insn = last_call_insn ();
1663 add_function_usage_to (call_insn, call_fusage);
1665 /* Restore the stack. */
1666 #ifdef HAVE_save_stack_nonlocal
1667 if (HAVE_save_stack_nonlocal)
1668 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1669 else
1670 #endif
1671 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1673 OK_DEFER_POP;
1675 /* Return the address of the result block. */
1676 result = copy_addr_to_reg (XEXP (result, 0));
1677 return convert_memory_address (ptr_mode, result);
1680 /* Perform an untyped return. */
1682 static void
1683 expand_builtin_return (rtx result)
1685 int size, align, regno;
1686 enum machine_mode mode;
1687 rtx reg;
1688 rtx call_fusage = 0;
1690 result = convert_memory_address (Pmode, result);
1692 apply_result_size ();
1693 result = gen_rtx_MEM (BLKmode, result);
1695 #ifdef HAVE_untyped_return
1696 if (HAVE_untyped_return)
1698 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1699 emit_barrier ();
1700 return;
1702 #endif
1704 /* Restore the return value and note that each value is used. */
1705 size = 0;
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_result_mode[regno]) != VOIDmode)
1709 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1710 if (size % align != 0)
1711 size = CEIL (size, align) * align;
1712 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1713 emit_move_insn (reg, adjust_address (result, mode, size));
1715 push_to_sequence (call_fusage);
1716 emit_use (reg);
1717 call_fusage = get_insns ();
1718 end_sequence ();
1719 size += GET_MODE_SIZE (mode);
1722 /* Put the USE insns before the return. */
1723 emit_insn (call_fusage);
1725 /* Return whatever values were restored by jumping directly to the end
1726 of the function. */
1727 expand_naked_return ();
1730 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1732 static enum type_class
1733 type_to_class (tree type)
1735 switch (TREE_CODE (type))
1737 case VOID_TYPE: return void_type_class;
1738 case INTEGER_TYPE: return integer_type_class;
1739 case ENUMERAL_TYPE: return enumeral_type_class;
1740 case BOOLEAN_TYPE: return boolean_type_class;
1741 case POINTER_TYPE: return pointer_type_class;
1742 case REFERENCE_TYPE: return reference_type_class;
1743 case OFFSET_TYPE: return offset_type_class;
1744 case REAL_TYPE: return real_type_class;
1745 case COMPLEX_TYPE: return complex_type_class;
1746 case FUNCTION_TYPE: return function_type_class;
1747 case METHOD_TYPE: return method_type_class;
1748 case RECORD_TYPE: return record_type_class;
1749 case UNION_TYPE:
1750 case QUAL_UNION_TYPE: return union_type_class;
1751 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1752 ? string_type_class : array_type_class);
1753 case LANG_TYPE: return lang_type_class;
1754 default: return no_type_class;
1758 /* Expand a call EXP to __builtin_classify_type. */
1760 static rtx
1761 expand_builtin_classify_type (tree exp)
1763 if (call_expr_nargs (exp))
1764 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1765 return GEN_INT (no_type_class);
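/* Illustrative examples (editorial note, not part of the original source),
   following the mapping in type_to_class above:

     __builtin_classify_type (1)           -> integer_type_class
     __builtin_classify_type (1.0)         -> real_type_class
     __builtin_classify_type ((void *) 0)  -> pointer_type_class

   When the call carries no argument, the expansion above yields
   no_type_class.  */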
1768 /* This helper macro, meant to be used in mathfn_built_in below,
1769 determines which among a set of three builtin math functions is
1770 appropriate for a given type mode. The `F' and `L' cases are
1771 automatically generated from the `double' case. */
1772 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1773 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1774 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1775 fcodel = BUILT_IN_MATHFN##L ; break;
1776 /* Similar to above, but appends _R after any F/L suffix. */
1777 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1778 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1779 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1780 fcodel = BUILT_IN_MATHFN##L_R ; break;
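/* Illustrative expansion (added example, not in the original source): a
   single use such as

     CASE_MATHFN (BUILT_IN_SIN)

   expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so any of the three suffixed variants selects the same fcode/fcodef/fcodel
   triple in mathfn_built_in_1 below.  */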
1782 /* Return the mathematical function equivalent to FN but operating directly
1783 on TYPE, if available. If IMPLICIT is true find the function in
1784 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1785 can't do the conversion, return zero. */
1787 static tree
1788 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1790 tree const *const fn_arr
1791 = implicit ? implicit_built_in_decls : built_in_decls;
1792 enum built_in_function fcode, fcodef, fcodel;
1794 switch (fn)
1796 CASE_MATHFN (BUILT_IN_ACOS)
1797 CASE_MATHFN (BUILT_IN_ACOSH)
1798 CASE_MATHFN (BUILT_IN_ASIN)
1799 CASE_MATHFN (BUILT_IN_ASINH)
1800 CASE_MATHFN (BUILT_IN_ATAN)
1801 CASE_MATHFN (BUILT_IN_ATAN2)
1802 CASE_MATHFN (BUILT_IN_ATANH)
1803 CASE_MATHFN (BUILT_IN_CBRT)
1804 CASE_MATHFN (BUILT_IN_CEIL)
1805 CASE_MATHFN (BUILT_IN_CEXPI)
1806 CASE_MATHFN (BUILT_IN_COPYSIGN)
1807 CASE_MATHFN (BUILT_IN_COS)
1808 CASE_MATHFN (BUILT_IN_COSH)
1809 CASE_MATHFN (BUILT_IN_DREM)
1810 CASE_MATHFN (BUILT_IN_ERF)
1811 CASE_MATHFN (BUILT_IN_ERFC)
1812 CASE_MATHFN (BUILT_IN_EXP)
1813 CASE_MATHFN (BUILT_IN_EXP10)
1814 CASE_MATHFN (BUILT_IN_EXP2)
1815 CASE_MATHFN (BUILT_IN_EXPM1)
1816 CASE_MATHFN (BUILT_IN_FABS)
1817 CASE_MATHFN (BUILT_IN_FDIM)
1818 CASE_MATHFN (BUILT_IN_FLOOR)
1819 CASE_MATHFN (BUILT_IN_FMA)
1820 CASE_MATHFN (BUILT_IN_FMAX)
1821 CASE_MATHFN (BUILT_IN_FMIN)
1822 CASE_MATHFN (BUILT_IN_FMOD)
1823 CASE_MATHFN (BUILT_IN_FREXP)
1824 CASE_MATHFN (BUILT_IN_GAMMA)
1825 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1826 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1827 CASE_MATHFN (BUILT_IN_HYPOT)
1828 CASE_MATHFN (BUILT_IN_ILOGB)
1829 CASE_MATHFN (BUILT_IN_INF)
1830 CASE_MATHFN (BUILT_IN_ISINF)
1831 CASE_MATHFN (BUILT_IN_J0)
1832 CASE_MATHFN (BUILT_IN_J1)
1833 CASE_MATHFN (BUILT_IN_JN)
1834 CASE_MATHFN (BUILT_IN_LCEIL)
1835 CASE_MATHFN (BUILT_IN_LDEXP)
1836 CASE_MATHFN (BUILT_IN_LFLOOR)
1837 CASE_MATHFN (BUILT_IN_LGAMMA)
1838 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1839 CASE_MATHFN (BUILT_IN_LLCEIL)
1840 CASE_MATHFN (BUILT_IN_LLFLOOR)
1841 CASE_MATHFN (BUILT_IN_LLRINT)
1842 CASE_MATHFN (BUILT_IN_LLROUND)
1843 CASE_MATHFN (BUILT_IN_LOG)
1844 CASE_MATHFN (BUILT_IN_LOG10)
1845 CASE_MATHFN (BUILT_IN_LOG1P)
1846 CASE_MATHFN (BUILT_IN_LOG2)
1847 CASE_MATHFN (BUILT_IN_LOGB)
1848 CASE_MATHFN (BUILT_IN_LRINT)
1849 CASE_MATHFN (BUILT_IN_LROUND)
1850 CASE_MATHFN (BUILT_IN_MODF)
1851 CASE_MATHFN (BUILT_IN_NAN)
1852 CASE_MATHFN (BUILT_IN_NANS)
1853 CASE_MATHFN (BUILT_IN_NEARBYINT)
1854 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1855 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1856 CASE_MATHFN (BUILT_IN_POW)
1857 CASE_MATHFN (BUILT_IN_POWI)
1858 CASE_MATHFN (BUILT_IN_POW10)
1859 CASE_MATHFN (BUILT_IN_REMAINDER)
1860 CASE_MATHFN (BUILT_IN_REMQUO)
1861 CASE_MATHFN (BUILT_IN_RINT)
1862 CASE_MATHFN (BUILT_IN_ROUND)
1863 CASE_MATHFN (BUILT_IN_SCALB)
1864 CASE_MATHFN (BUILT_IN_SCALBLN)
1865 CASE_MATHFN (BUILT_IN_SCALBN)
1866 CASE_MATHFN (BUILT_IN_SIGNBIT)
1867 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1868 CASE_MATHFN (BUILT_IN_SIN)
1869 CASE_MATHFN (BUILT_IN_SINCOS)
1870 CASE_MATHFN (BUILT_IN_SINH)
1871 CASE_MATHFN (BUILT_IN_SQRT)
1872 CASE_MATHFN (BUILT_IN_TAN)
1873 CASE_MATHFN (BUILT_IN_TANH)
1874 CASE_MATHFN (BUILT_IN_TGAMMA)
1875 CASE_MATHFN (BUILT_IN_TRUNC)
1876 CASE_MATHFN (BUILT_IN_Y0)
1877 CASE_MATHFN (BUILT_IN_Y1)
1878 CASE_MATHFN (BUILT_IN_YN)
1880 default:
1881 return NULL_TREE;
1884 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1885 return fn_arr[fcode];
1886 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1887 return fn_arr[fcodef];
1888 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1889 return fn_arr[fcodel];
1890 else
1891 return NULL_TREE;
1894 /* Like mathfn_built_in_1(), but always use the implicit array. */
1896 tree
1897 mathfn_built_in (tree type, enum built_in_function fn)
1899 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
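/* Usage sketch (editorial example, not in the original source): given the
   table above,

     mathfn_built_in (float_type_node, BUILT_IN_SIN)

   returns the implicit declaration for sinf, passing long_double_type_node
   instead returns the one for sinl, and an unsupported type yields
   NULL_TREE.  */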
1902 /* If errno must be maintained, expand the RTL to check if the result,
1903 TARGET, of a built-in function call, EXP, is NaN, and if so set
1904 errno to EDOM. */
1906 static void
1907 expand_errno_check (tree exp, rtx target)
1909 rtx lab = gen_label_rtx ();
1911 /* Test the result; if it is NaN, set errno=EDOM because
1912 the argument was not in the domain. */
1913 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1914 NULL_RTX, NULL_RTX, lab,
1915 /* The jump is very likely. */
1916 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1918 #ifdef TARGET_EDOM
1919 /* If this built-in doesn't throw an exception, set errno directly. */
1920 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1922 #ifdef GEN_ERRNO_RTX
1923 rtx errno_rtx = GEN_ERRNO_RTX;
1924 #else
1925 rtx errno_rtx
1926 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1927 #endif
1928 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1929 emit_label (lab);
1930 return;
1932 #endif
1934 /* Make sure the library call isn't expanded as a tail call. */
1935 CALL_EXPR_TAILCALL (exp) = 0;
1937 /* We can't set errno=EDOM directly; let the library call do it.
1938 Pop the arguments right away in case the call gets deleted. */
1939 NO_DEFER_POP;
1940 expand_call (exp, target, 0);
1941 OK_DEFER_POP;
1942 emit_label (lab);
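/* Example (illustrative, not from the original source): for a call such as
   sqrt (-1.0) the result is a NaN, so the self-comparison above does not
   take the branch and errno is set to EDOM, either directly when TARGET_EDOM
   is defined or by re-issuing the call as a real library call.  */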
1945 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1946 Return NULL_RTX if a normal call should be emitted rather than expanding
1947 the function in-line. EXP is the expression that is a call to the builtin
1948 function; if convenient, the result should be placed in TARGET.
1949 SUBTARGET may be used as the target for computing one of EXP's operands. */
1951 static rtx
1952 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1954 optab builtin_optab;
1955 rtx op0, insns;
1956 tree fndecl = get_callee_fndecl (exp);
1957 enum machine_mode mode;
1958 bool errno_set = false;
1959 tree arg;
1961 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1962 return NULL_RTX;
1964 arg = CALL_EXPR_ARG (exp, 0);
1966 switch (DECL_FUNCTION_CODE (fndecl))
1968 CASE_FLT_FN (BUILT_IN_SQRT):
1969 errno_set = ! tree_expr_nonnegative_p (arg);
1970 builtin_optab = sqrt_optab;
1971 break;
1972 CASE_FLT_FN (BUILT_IN_EXP):
1973 errno_set = true; builtin_optab = exp_optab; break;
1974 CASE_FLT_FN (BUILT_IN_EXP10):
1975 CASE_FLT_FN (BUILT_IN_POW10):
1976 errno_set = true; builtin_optab = exp10_optab; break;
1977 CASE_FLT_FN (BUILT_IN_EXP2):
1978 errno_set = true; builtin_optab = exp2_optab; break;
1979 CASE_FLT_FN (BUILT_IN_EXPM1):
1980 errno_set = true; builtin_optab = expm1_optab; break;
1981 CASE_FLT_FN (BUILT_IN_LOGB):
1982 errno_set = true; builtin_optab = logb_optab; break;
1983 CASE_FLT_FN (BUILT_IN_LOG):
1984 errno_set = true; builtin_optab = log_optab; break;
1985 CASE_FLT_FN (BUILT_IN_LOG10):
1986 errno_set = true; builtin_optab = log10_optab; break;
1987 CASE_FLT_FN (BUILT_IN_LOG2):
1988 errno_set = true; builtin_optab = log2_optab; break;
1989 CASE_FLT_FN (BUILT_IN_LOG1P):
1990 errno_set = true; builtin_optab = log1p_optab; break;
1991 CASE_FLT_FN (BUILT_IN_ASIN):
1992 builtin_optab = asin_optab; break;
1993 CASE_FLT_FN (BUILT_IN_ACOS):
1994 builtin_optab = acos_optab; break;
1995 CASE_FLT_FN (BUILT_IN_TAN):
1996 builtin_optab = tan_optab; break;
1997 CASE_FLT_FN (BUILT_IN_ATAN):
1998 builtin_optab = atan_optab; break;
1999 CASE_FLT_FN (BUILT_IN_FLOOR):
2000 builtin_optab = floor_optab; break;
2001 CASE_FLT_FN (BUILT_IN_CEIL):
2002 builtin_optab = ceil_optab; break;
2003 CASE_FLT_FN (BUILT_IN_TRUNC):
2004 builtin_optab = btrunc_optab; break;
2005 CASE_FLT_FN (BUILT_IN_ROUND):
2006 builtin_optab = round_optab; break;
2007 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2008 builtin_optab = nearbyint_optab;
2009 if (flag_trapping_math)
2010 break;
2011 /* Else fallthrough and expand as rint. */
2012 CASE_FLT_FN (BUILT_IN_RINT):
2013 builtin_optab = rint_optab; break;
2014 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2015 builtin_optab = significand_optab; break;
2016 default:
2017 gcc_unreachable ();
2020 /* Make a suitable register to place result in. */
2021 mode = TYPE_MODE (TREE_TYPE (exp));
2023 if (! flag_errno_math || ! HONOR_NANS (mode))
2024 errno_set = false;
2026 /* Before working hard, check whether the instruction is available. */
2027 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2028 && (!errno_set || !optimize_insn_for_size_p ()))
2030 target = gen_reg_rtx (mode);
2032 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2033 need to expand the argument again. This way, we will not perform
2034 side-effects more than once. */
2035 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2037 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2039 start_sequence ();
2041 /* Compute into TARGET.
2042 Set TARGET to wherever the result comes back. */
2043 target = expand_unop (mode, builtin_optab, op0, target, 0);
2045 if (target != 0)
2047 if (errno_set)
2048 expand_errno_check (exp, target);
2050 /* Output the entire sequence. */
2051 insns = get_insns ();
2052 end_sequence ();
2053 emit_insn (insns);
2054 return target;
2057 /* If we were unable to expand via the builtin, stop the sequence
2058 (without outputting the insns) and call the library function
2059 with the stabilized argument list. */
2060 end_sequence ();
2063 return expand_call (exp, target, target == const0_rtx);
2066 /* Expand a call to the builtin binary math functions (pow and atan2).
2067 Return NULL_RTX if a normal call should be emitted rather than expanding the
2068 function in-line. EXP is the expression that is a call to the builtin
2069 function; if convenient, the result should be placed in TARGET.
2070 SUBTARGET may be used as the target for computing one of EXP's
2071 operands. */
2073 static rtx
2074 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2076 optab builtin_optab;
2077 rtx op0, op1, insns;
2078 int op1_type = REAL_TYPE;
2079 tree fndecl = get_callee_fndecl (exp);
2080 tree arg0, arg1;
2081 enum machine_mode mode;
2082 bool errno_set = true;
2084 switch (DECL_FUNCTION_CODE (fndecl))
2086 CASE_FLT_FN (BUILT_IN_SCALBN):
2087 CASE_FLT_FN (BUILT_IN_SCALBLN):
2088 CASE_FLT_FN (BUILT_IN_LDEXP):
2089 op1_type = INTEGER_TYPE;
2090 default:
2091 break;
2094 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2095 return NULL_RTX;
2097 arg0 = CALL_EXPR_ARG (exp, 0);
2098 arg1 = CALL_EXPR_ARG (exp, 1);
2100 switch (DECL_FUNCTION_CODE (fndecl))
2102 CASE_FLT_FN (BUILT_IN_POW):
2103 builtin_optab = pow_optab; break;
2104 CASE_FLT_FN (BUILT_IN_ATAN2):
2105 builtin_optab = atan2_optab; break;
2106 CASE_FLT_FN (BUILT_IN_SCALB):
2107 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2108 return 0;
2109 builtin_optab = scalb_optab; break;
2110 CASE_FLT_FN (BUILT_IN_SCALBN):
2111 CASE_FLT_FN (BUILT_IN_SCALBLN):
2112 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2113 return 0;
2114 /* Fall through... */
2115 CASE_FLT_FN (BUILT_IN_LDEXP):
2116 builtin_optab = ldexp_optab; break;
2117 CASE_FLT_FN (BUILT_IN_FMOD):
2118 builtin_optab = fmod_optab; break;
2119 CASE_FLT_FN (BUILT_IN_REMAINDER):
2120 CASE_FLT_FN (BUILT_IN_DREM):
2121 builtin_optab = remainder_optab; break;
2122 default:
2123 gcc_unreachable ();
2126 /* Make a suitable register to place result in. */
2127 mode = TYPE_MODE (TREE_TYPE (exp));
2129 /* Before working hard, check whether the instruction is available. */
2130 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2131 return NULL_RTX;
2133 target = gen_reg_rtx (mode);
2135 if (! flag_errno_math || ! HONOR_NANS (mode))
2136 errno_set = false;
2138 if (errno_set && optimize_insn_for_size_p ())
2139 return 0;
2141 /* Always stabilize the argument list. */
2142 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2143 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2145 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2146 op1 = expand_normal (arg1);
2148 start_sequence ();
2150 /* Compute into TARGET.
2151 Set TARGET to wherever the result comes back. */
2152 target = expand_binop (mode, builtin_optab, op0, op1,
2153 target, 0, OPTAB_DIRECT);
2155 /* If we were unable to expand via the builtin, stop the sequence
2156 (without outputting the insns) and call the library function
2157 with the stabilized argument list. */
2158 if (target == 0)
2160 end_sequence ();
2161 return expand_call (exp, target, target == const0_rtx);
2164 if (errno_set)
2165 expand_errno_check (exp, target);
2167 /* Output the entire sequence. */
2168 insns = get_insns ();
2169 end_sequence ();
2170 emit_insn (insns);
2172 return target;
2175 /* Expand a call to the builtin ternary math functions (fma).
2176 Return NULL_RTX if a normal call should be emitted rather than expanding the
2177 function in-line. EXP is the expression that is a call to the builtin
2178 function; if convenient, the result should be placed in TARGET.
2179 SUBTARGET may be used as the target for computing one of EXP's
2180 operands. */
2182 static rtx
2183 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2185 optab builtin_optab;
2186 rtx op0, op1, op2, insns;
2187 tree fndecl = get_callee_fndecl (exp);
2188 tree arg0, arg1, arg2;
2189 enum machine_mode mode;
2191 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2192 return NULL_RTX;
2194 arg0 = CALL_EXPR_ARG (exp, 0);
2195 arg1 = CALL_EXPR_ARG (exp, 1);
2196 arg2 = CALL_EXPR_ARG (exp, 2);
2198 switch (DECL_FUNCTION_CODE (fndecl))
2200 CASE_FLT_FN (BUILT_IN_FMA):
2201 builtin_optab = fma_optab; break;
2202 default:
2203 gcc_unreachable ();
2206 /* Make a suitable register to place result in. */
2207 mode = TYPE_MODE (TREE_TYPE (exp));
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2211 return NULL_RTX;
2213 target = gen_reg_rtx (mode);
2215 /* Always stabilize the argument list. */
2216 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2217 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2218 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2220 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2221 op1 = expand_normal (arg1);
2222 op2 = expand_normal (arg2);
2224 start_sequence ();
2226 /* Compute into TARGET.
2227 Set TARGET to wherever the result comes back. */
2228 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2229 target, 0);
2231 /* If we were unable to expand via the builtin, stop the sequence
2232 (without outputting the insns) and call the library function
2233 with the stabilized argument list. */
2234 if (target == 0)
2236 end_sequence ();
2237 return expand_call (exp, target, target == const0_rtx);
2240 /* Output the entire sequence. */
2241 insns = get_insns ();
2242 end_sequence ();
2243 emit_insn (insns);
2245 return target;
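/* Editorial note (not in the original source): fma (a, b, c) computes
   a * b + c with a single rounding step, so expanding through fma_optab is
   only done when the target provides such an instruction; otherwise the
   code above falls back to a normal library call.  */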
2248 /* Expand a call to the builtin sin and cos math functions.
2249 Return NULL_RTX if a normal call should be emitted rather than expanding the
2250 function in-line. EXP is the expression that is a call to the builtin
2251 function; if convenient, the result should be placed in TARGET.
2252 SUBTARGET may be used as the target for computing one of EXP's
2253 operands. */
2255 static rtx
2256 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2258 optab builtin_optab;
2259 rtx op0, insns;
2260 tree fndecl = get_callee_fndecl (exp);
2261 enum machine_mode mode;
2262 tree arg;
2264 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2267 arg = CALL_EXPR_ARG (exp, 0);
2269 switch (DECL_FUNCTION_CODE (fndecl))
2271 CASE_FLT_FN (BUILT_IN_SIN):
2272 CASE_FLT_FN (BUILT_IN_COS):
2273 builtin_optab = sincos_optab; break;
2274 default:
2275 gcc_unreachable ();
2278 /* Make a suitable register to place result in. */
2279 mode = TYPE_MODE (TREE_TYPE (exp));
2281 /* Check if sincos insn is available, otherwise fall back
2282 to sin or cos insn. */
2283 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2284 switch (DECL_FUNCTION_CODE (fndecl))
2286 CASE_FLT_FN (BUILT_IN_SIN):
2287 builtin_optab = sin_optab; break;
2288 CASE_FLT_FN (BUILT_IN_COS):
2289 builtin_optab = cos_optab; break;
2290 default:
2291 gcc_unreachable ();
2294 /* Before working hard, check whether the instruction is available. */
2295 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2297 target = gen_reg_rtx (mode);
2299 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2300 need to expand the argument again. This way, we will not perform
2301 side-effects more than once. */
2302 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2304 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2306 start_sequence ();
2308 /* Compute into TARGET.
2309 Set TARGET to wherever the result comes back. */
2310 if (builtin_optab == sincos_optab)
2312 int result;
2314 switch (DECL_FUNCTION_CODE (fndecl))
2316 CASE_FLT_FN (BUILT_IN_SIN):
2317 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2318 break;
2319 CASE_FLT_FN (BUILT_IN_COS):
2320 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2321 break;
2322 default:
2323 gcc_unreachable ();
2325 gcc_assert (result);
2327 else
2329 target = expand_unop (mode, builtin_optab, op0, target, 0);
2332 if (target != 0)
2334 /* Output the entire sequence. */
2335 insns = get_insns ();
2336 end_sequence ();
2337 emit_insn (insns);
2338 return target;
2341 /* If we were unable to expand via the builtin, stop the sequence
2342 (without outputting the insns) and call the library function
2343 with the stabilized argument list. */
2344 end_sequence ();
2347 target = expand_call (exp, target, target == const0_rtx);
2349 return target;
2352 /* Given an interclass math builtin decl FNDECL and its argument ARG
2353 return an RTL instruction code that implements the functionality.
2354 If that isn't possible or available return CODE_FOR_nothing. */
2356 static enum insn_code
2357 interclass_mathfn_icode (tree arg, tree fndecl)
2359 bool errno_set = false;
2360 optab builtin_optab = 0;
2361 enum machine_mode mode;
2363 switch (DECL_FUNCTION_CODE (fndecl))
2365 CASE_FLT_FN (BUILT_IN_ILOGB):
2366 errno_set = true; builtin_optab = ilogb_optab; break;
2367 CASE_FLT_FN (BUILT_IN_ISINF):
2368 builtin_optab = isinf_optab; break;
2369 case BUILT_IN_ISNORMAL:
2370 case BUILT_IN_ISFINITE:
2371 CASE_FLT_FN (BUILT_IN_FINITE):
2372 case BUILT_IN_FINITED32:
2373 case BUILT_IN_FINITED64:
2374 case BUILT_IN_FINITED128:
2375 case BUILT_IN_ISINFD32:
2376 case BUILT_IN_ISINFD64:
2377 case BUILT_IN_ISINFD128:
2378 /* These builtins have no optabs (yet). */
2379 break;
2380 default:
2381 gcc_unreachable ();
2384 /* There's no easy way to detect the case we need to set EDOM. */
2385 if (flag_errno_math && errno_set)
2386 return CODE_FOR_nothing;
2388 /* Optab mode depends on the mode of the input argument. */
2389 mode = TYPE_MODE (TREE_TYPE (arg));
2391 if (builtin_optab)
2392 return optab_handler (builtin_optab, mode);
2393 return CODE_FOR_nothing;
2396 /* Expand a call to one of the builtin math functions that operate on
2397 a floating point argument and output an integer result (ilogb, isinf,
2398 isnan, etc).
2399 Return 0 if a normal call should be emitted rather than expanding the
2400 function in-line. EXP is the expression that is a call to the builtin
2401 function; if convenient, the result should be placed in TARGET. */
2403 static rtx
2404 expand_builtin_interclass_mathfn (tree exp, rtx target)
2406 enum insn_code icode = CODE_FOR_nothing;
2407 rtx op0;
2408 tree fndecl = get_callee_fndecl (exp);
2409 enum machine_mode mode;
2410 tree arg;
2412 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2413 return NULL_RTX;
2415 arg = CALL_EXPR_ARG (exp, 0);
2416 icode = interclass_mathfn_icode (arg, fndecl);
2417 mode = TYPE_MODE (TREE_TYPE (arg));
2419 if (icode != CODE_FOR_nothing)
2421 struct expand_operand ops[1];
2422 rtx last = get_last_insn ();
2423 tree orig_arg = arg;
2425 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2426 need to expand the argument again. This way, we will not perform
2427 side-effects more than once. */
2428 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2430 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2432 if (mode != GET_MODE (op0))
2433 op0 = convert_to_mode (mode, op0, 0);
2435 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2436 if (maybe_legitimize_operands (icode, 0, 1, ops)
2437 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2438 return ops[0].value;
2440 delete_insns_since (last);
2441 CALL_EXPR_ARG (exp, 0) = orig_arg;
2444 return NULL_RTX;
2447 /* Expand a call to the builtin sincos math function.
2448 Return NULL_RTX if a normal call should be emitted rather than expanding the
2449 function in-line. EXP is the expression that is a call to the builtin
2450 function. */
2452 static rtx
2453 expand_builtin_sincos (tree exp)
2455 rtx op0, op1, op2, target1, target2;
2456 enum machine_mode mode;
2457 tree arg, sinp, cosp;
2458 int result;
2459 location_t loc = EXPR_LOCATION (exp);
2460 tree alias_type, alias_off;
2462 if (!validate_arglist (exp, REAL_TYPE,
2463 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2464 return NULL_RTX;
2466 arg = CALL_EXPR_ARG (exp, 0);
2467 sinp = CALL_EXPR_ARG (exp, 1);
2468 cosp = CALL_EXPR_ARG (exp, 2);
2470 /* Make a suitable register to place result in. */
2471 mode = TYPE_MODE (TREE_TYPE (arg));
2473 /* Check if sincos insn is available, otherwise emit the call. */
2474 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2475 return NULL_RTX;
2477 target1 = gen_reg_rtx (mode);
2478 target2 = gen_reg_rtx (mode);
2480 op0 = expand_normal (arg);
2481 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2482 alias_off = build_int_cst (alias_type, 0);
2483 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2484 sinp, alias_off));
2485 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2486 cosp, alias_off));
2488 /* Compute into target1 and target2.
2489 Set TARGET to wherever the result comes back. */
2490 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2491 gcc_assert (result);
2493 /* Move target1 and target2 to the memory locations indicated
2494 by op1 and op2. */
2495 emit_move_insn (op1, target1);
2496 emit_move_insn (op2, target2);
2498 return const0_rtx;
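/* Expansion sketch (illustrative, not in the original source): for

     sincos (x, &s, &c);

   the code above emits one sincos-pattern insn computing both results and
   then stores them through the two pointer arguments.  If the sincos optab
   is unavailable, NULL_RTX is returned and a normal call is emitted
   instead.  */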
2501 /* Expand a call to the internal cexpi builtin to the sincos math function.
2502 EXP is the expression that is a call to the builtin function; if convenient,
2503 the result should be placed in TARGET. */
2505 static rtx
2506 expand_builtin_cexpi (tree exp, rtx target)
2508 tree fndecl = get_callee_fndecl (exp);
2509 tree arg, type;
2510 enum machine_mode mode;
2511 rtx op0, op1, op2;
2512 location_t loc = EXPR_LOCATION (exp);
2514 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2515 return NULL_RTX;
2517 arg = CALL_EXPR_ARG (exp, 0);
2518 type = TREE_TYPE (arg);
2519 mode = TYPE_MODE (TREE_TYPE (arg));
2521 /* Try expanding via a sincos optab, fall back to emitting a libcall
2522 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2523 is only generated from sincos or cexp, or if we have either of them. */
2524 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2526 op1 = gen_reg_rtx (mode);
2527 op2 = gen_reg_rtx (mode);
2529 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2531 /* Compute into op1 and op2. */
2532 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2534 else if (TARGET_HAS_SINCOS)
2536 tree call, fn = NULL_TREE;
2537 tree top1, top2;
2538 rtx op1a, op2a;
2540 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2541 fn = built_in_decls[BUILT_IN_SINCOSF];
2542 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2543 fn = built_in_decls[BUILT_IN_SINCOS];
2544 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2545 fn = built_in_decls[BUILT_IN_SINCOSL];
2546 else
2547 gcc_unreachable ();
2549 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2550 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2551 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2552 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2553 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2554 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2556 /* Make sure not to fold the sincos call again. */
2557 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2558 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2559 call, 3, arg, top1, top2));
2561 else
2563 tree call, fn = NULL_TREE, narg;
2564 tree ctype = build_complex_type (type);
2566 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2567 fn = built_in_decls[BUILT_IN_CEXPF];
2568 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2569 fn = built_in_decls[BUILT_IN_CEXP];
2570 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2571 fn = built_in_decls[BUILT_IN_CEXPL];
2572 else
2573 gcc_unreachable ();
2575 /* If we don't have a decl for cexp create one. This is the
2576 friendliest fallback if the user calls __builtin_cexpi
2577 without full C99 function support on the target. */
2578 if (fn == NULL_TREE)
2580 tree fntype;
2581 const char *name = NULL;
2583 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2584 name = "cexpf";
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2586 name = "cexp";
2587 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2588 name = "cexpl";
2590 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2591 fn = build_fn_decl (name, fntype);
2594 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2595 build_real (type, dconst0), arg);
2597 /* Make sure not to fold the cexp call again. */
2598 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2599 return expand_expr (build_call_nary (ctype, call, 1, narg),
2600 target, VOIDmode, EXPAND_NORMAL);
2603 /* Now build the proper return type. */
2604 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2605 make_tree (TREE_TYPE (arg), op2),
2606 make_tree (TREE_TYPE (arg), op1)),
2607 target, VOIDmode, EXPAND_NORMAL);
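/* Editorial note (not in the original source): __builtin_cexpi (x) computes
   e**(i*x) = cos (x) + i*sin (x), so the three strategies above are
   equivalent: a sincos insn, a call to sincos(), or a call to cexp() on the
   purely imaginary argument 0 + i*x built just before the return.  */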
2610 /* Conveniently construct a function call expression. FNDECL names the
2611 function to be called, N is the number of arguments, and the "..."
2612 parameters are the argument expressions. Unlike build_call_expr
2613 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2615 static tree
2616 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2618 va_list ap;
2619 tree fntype = TREE_TYPE (fndecl);
2620 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2622 va_start (ap, n);
2623 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2624 va_end (ap);
2625 SET_EXPR_LOCATION (fn, loc);
2626 return fn;
2629 /* Expand a call to one of the builtin rounding functions gcc defines
2630 as an extension (lfloor and lceil). As these are gcc extensions we
2631 do not need to worry about setting errno to EDOM.
2632 If expanding via optab fails, lower the expression to (int)(floor(x)).
2633 EXP is the expression that is a call to the builtin function;
2634 if convenient, the result should be placed in TARGET. */
2636 static rtx
2637 expand_builtin_int_roundingfn (tree exp, rtx target)
2639 convert_optab builtin_optab;
2640 rtx op0, insns, tmp;
2641 tree fndecl = get_callee_fndecl (exp);
2642 enum built_in_function fallback_fn;
2643 tree fallback_fndecl;
2644 enum machine_mode mode;
2645 tree arg;
2647 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2648 gcc_unreachable ();
2650 arg = CALL_EXPR_ARG (exp, 0);
2652 switch (DECL_FUNCTION_CODE (fndecl))
2654 CASE_FLT_FN (BUILT_IN_LCEIL):
2655 CASE_FLT_FN (BUILT_IN_LLCEIL):
2656 builtin_optab = lceil_optab;
2657 fallback_fn = BUILT_IN_CEIL;
2658 break;
2660 CASE_FLT_FN (BUILT_IN_LFLOOR):
2661 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2662 builtin_optab = lfloor_optab;
2663 fallback_fn = BUILT_IN_FLOOR;
2664 break;
2666 default:
2667 gcc_unreachable ();
2670 /* Make a suitable register to place result in. */
2671 mode = TYPE_MODE (TREE_TYPE (exp));
2673 target = gen_reg_rtx (mode);
2675 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2676 need to expand the argument again. This way, we will not perform
2677 side-effects more than once. */
2678 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2680 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2682 start_sequence ();
2684 /* Compute into TARGET. */
2685 if (expand_sfix_optab (target, op0, builtin_optab))
2687 /* Output the entire sequence. */
2688 insns = get_insns ();
2689 end_sequence ();
2690 emit_insn (insns);
2691 return target;
2694 /* If we were unable to expand via the builtin, stop the sequence
2695 (without outputting the insns). */
2696 end_sequence ();
2698 /* Fall back to floating point rounding optab. */
2699 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2701 /* For non-C99 targets we may end up without a fallback fndecl here
2702 if the user called __builtin_lfloor directly. In this case emit
2703 a call to the floor/ceil variants nevertheless. This should result
2704 in the best user experience for targets without full C99 support. */
2705 if (fallback_fndecl == NULL_TREE)
2707 tree fntype;
2708 const char *name = NULL;
2710 switch (DECL_FUNCTION_CODE (fndecl))
2712 case BUILT_IN_LCEIL:
2713 case BUILT_IN_LLCEIL:
2714 name = "ceil";
2715 break;
2716 case BUILT_IN_LCEILF:
2717 case BUILT_IN_LLCEILF:
2718 name = "ceilf";
2719 break;
2720 case BUILT_IN_LCEILL:
2721 case BUILT_IN_LLCEILL:
2722 name = "ceill";
2723 break;
2724 case BUILT_IN_LFLOOR:
2725 case BUILT_IN_LLFLOOR:
2726 name = "floor";
2727 break;
2728 case BUILT_IN_LFLOORF:
2729 case BUILT_IN_LLFLOORF:
2730 name = "floorf";
2731 break;
2732 case BUILT_IN_LFLOORL:
2733 case BUILT_IN_LLFLOORL:
2734 name = "floorl";
2735 break;
2736 default:
2737 gcc_unreachable ();
2740 fntype = build_function_type_list (TREE_TYPE (arg),
2741 TREE_TYPE (arg), NULL_TREE);
2742 fallback_fndecl = build_fn_decl (name, fntype);
2745 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2747 tmp = expand_normal (exp);
2749 /* Truncate the result of floating point optab to integer
2750 via expand_fix (). */
2751 target = gen_reg_rtx (mode);
2752 expand_fix (target, tmp, 0);
2754 return target;
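/* Fallback example (illustrative, not in the original source): if the target
   has no lfloor pattern, a call such as __builtin_lfloor (x) is expanded
   roughly as

     long tmp = (long) floor (x);

   i.e. the floor/ceil library routine followed by an expand_fix conversion,
   as implemented above.  */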
2757 /* Expand a call to one of the builtin math functions doing integer
2758 conversion (lrint).
2759 Return 0 if a normal call should be emitted rather than expanding the
2760 function in-line. EXP is the expression that is a call to the builtin
2761 function; if convenient, the result should be placed in TARGET. */
2763 static rtx
2764 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2766 convert_optab builtin_optab;
2767 rtx op0, insns;
2768 tree fndecl = get_callee_fndecl (exp);
2769 tree arg;
2770 enum machine_mode mode;
2772 /* There's no easy way to detect the case we need to set EDOM. */
2773 if (flag_errno_math)
2774 return NULL_RTX;
2776 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2777 gcc_unreachable ();
2779 arg = CALL_EXPR_ARG (exp, 0);
2781 switch (DECL_FUNCTION_CODE (fndecl))
2783 CASE_FLT_FN (BUILT_IN_LRINT):
2784 CASE_FLT_FN (BUILT_IN_LLRINT):
2785 builtin_optab = lrint_optab; break;
2786 CASE_FLT_FN (BUILT_IN_LROUND):
2787 CASE_FLT_FN (BUILT_IN_LLROUND):
2788 builtin_optab = lround_optab; break;
2789 default:
2790 gcc_unreachable ();
2793 /* Make a suitable register to place result in. */
2794 mode = TYPE_MODE (TREE_TYPE (exp));
2796 target = gen_reg_rtx (mode);
2798 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2799 need to expand the argument again. This way, we will not perform
2800 side-effects more than once. */
2801 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2803 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2805 start_sequence ();
2807 if (expand_sfix_optab (target, op0, builtin_optab))
2809 /* Output the entire sequence. */
2810 insns = get_insns ();
2811 end_sequence ();
2812 emit_insn (insns);
2813 return target;
2816 /* If we were unable to expand via the builtin, stop the sequence
2818 (without outputting the insns) and call the library function
2818 with the stabilized argument list. */
2819 end_sequence ();
2821 target = expand_call (exp, target, target == const0_rtx);
2823 return target;
2826 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2827 a normal call should be emitted rather than expanding the function
2828 in-line. EXP is the expression that is a call to the builtin
2829 function; if convenient, the result should be placed in TARGET. */
2831 static rtx
2832 expand_builtin_powi (tree exp, rtx target)
2834 tree arg0, arg1;
2835 rtx op0, op1;
2836 enum machine_mode mode;
2837 enum machine_mode mode2;
2839 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2840 return NULL_RTX;
2842 arg0 = CALL_EXPR_ARG (exp, 0);
2843 arg1 = CALL_EXPR_ARG (exp, 1);
2844 mode = TYPE_MODE (TREE_TYPE (exp));
2846 /* Emit a libcall to libgcc. */
2848 /* Mode of the 2nd argument must match that of an int. */
2849 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2851 if (target == NULL_RTX)
2852 target = gen_reg_rtx (mode);
2854 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2855 if (GET_MODE (op0) != mode)
2856 op0 = convert_to_mode (mode, op0, 0);
2857 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2858 if (GET_MODE (op1) != mode2)
2859 op1 = convert_to_mode (mode2, op1, 0);
2861 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2862 target, LCT_CONST, mode, 2,
2863 op0, mode, op1, mode2);
2865 return target;
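/* Editorial note (not in the original source): __builtin_powi (x, n) raises
   x to the integer power n.  The expansion above always emits a library call
   obtained via optab_libfunc (powi_optab, mode); for double this is
   typically libgcc's __powidf2 helper.  */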
2868 /* Expand expression EXP which is a call to the strlen builtin. Return
2869 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2870 try to get the result in TARGET, if convenient. */
2872 static rtx
2873 expand_builtin_strlen (tree exp, rtx target,
2874 enum machine_mode target_mode)
2876 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2877 return NULL_RTX;
2878 else
2880 struct expand_operand ops[4];
2881 rtx pat;
2882 tree len;
2883 tree src = CALL_EXPR_ARG (exp, 0);
2884 rtx src_reg, before_strlen;
2885 enum machine_mode insn_mode = target_mode;
2886 enum insn_code icode = CODE_FOR_nothing;
2887 unsigned int align;
2889 /* If the length can be computed at compile-time, return it. */
2890 len = c_strlen (src, 0);
2891 if (len)
2892 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2894 /* If the length can be computed at compile-time and is constant
2895 integer, but there are side-effects in src, evaluate
2896 src for side-effects, then return len.
2897 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2898 can be optimized into: i++; x = 3; */
2899 len = c_strlen (src, 1);
2900 if (len && TREE_CODE (len) == INTEGER_CST)
2902 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2903 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2906 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2908 /* If SRC is not a pointer type, don't do this operation inline. */
2909 if (align == 0)
2910 return NULL_RTX;
2912 /* Bail out if we can't compute strlen in the right mode. */
2913 while (insn_mode != VOIDmode)
2915 icode = optab_handler (strlen_optab, insn_mode);
2916 if (icode != CODE_FOR_nothing)
2917 break;
2919 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2921 if (insn_mode == VOIDmode)
2922 return NULL_RTX;
2924 /* Make a place to hold the source address. We will not expand
2925 the actual source until we are sure that the expansion will
2926 not fail -- there are trees that cannot be expanded twice. */
2927 src_reg = gen_reg_rtx (Pmode);
2929 /* Mark the beginning of the strlen sequence so we can emit the
2930 source operand later. */
2931 before_strlen = get_last_insn ();
2933 create_output_operand (&ops[0], target, insn_mode);
2934 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2935 create_integer_operand (&ops[2], 0);
2936 create_integer_operand (&ops[3], align);
2937 if (!maybe_expand_insn (icode, 4, ops))
2938 return NULL_RTX;
2940 /* Now that we are assured of success, expand the source. */
2941 start_sequence ();
2942 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2943 if (pat != src_reg)
2945 #ifdef POINTERS_EXTEND_UNSIGNED
2946 if (GET_MODE (pat) != Pmode)
2947 pat = convert_to_mode (Pmode, pat,
2948 POINTERS_EXTEND_UNSIGNED);
2949 #endif
2950 emit_move_insn (src_reg, pat);
2952 pat = get_insns ();
2953 end_sequence ();
2955 if (before_strlen)
2956 emit_insn_after (pat, before_strlen);
2957 else
2958 emit_insn_before (pat, get_insns ());
2960 /* Return the value in the proper mode for this function. */
2961 if (GET_MODE (ops[0].value) == target_mode)
2962 target = ops[0].value;
2963 else if (target != 0)
2964 convert_move (target, ops[0].value, 0);
2965 else
2966 target = convert_to_mode (target_mode, ops[0].value, 0);
2968 return target;
2972 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2973 bytes from constant string DATA + OFFSET and return it as target
2974 constant. */
2976 static rtx
2977 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2978 enum machine_mode mode)
2980 const char *str = (const char *) data;
2982 gcc_assert (offset >= 0
2983 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2984 <= strlen (str) + 1));
2986 return c_readstr (str + offset, mode);
2989 /* Expand a call EXP to the memcpy builtin.
2990 Return NULL_RTX if we failed; the caller should emit a normal call,
2991 otherwise try to get the result in TARGET, if convenient (and in
2992 mode MODE if that's convenient). */
2994 static rtx
2995 expand_builtin_memcpy (tree exp, rtx target)
2997 if (!validate_arglist (exp,
2998 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2999 return NULL_RTX;
3000 else
3002 tree dest = CALL_EXPR_ARG (exp, 0);
3003 tree src = CALL_EXPR_ARG (exp, 1);
3004 tree len = CALL_EXPR_ARG (exp, 2);
3005 const char *src_str;
3006 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3007 unsigned int dest_align
3008 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3009 rtx dest_mem, src_mem, dest_addr, len_rtx;
3010 HOST_WIDE_INT expected_size = -1;
3011 unsigned int expected_align = 0;
3013 /* If DEST is not a pointer type, call the normal function. */
3014 if (dest_align == 0)
3015 return NULL_RTX;
3017 /* If SRC is not a pointer type, don't do this
3018 operation in-line. */
3019 if (src_align == 0)
3020 return NULL_RTX;
3022 if (currently_expanding_gimple_stmt)
3023 stringop_block_profile (currently_expanding_gimple_stmt,
3024 &expected_align, &expected_size);
3026 if (expected_align < dest_align)
3027 expected_align = dest_align;
3028 dest_mem = get_memory_rtx (dest, len);
3029 set_mem_align (dest_mem, dest_align);
3030 len_rtx = expand_normal (len);
3031 src_str = c_getstr (src);
3033 /* If SRC is a string constant and block move would be done
3034 by pieces, we can avoid loading the string from memory
3035 and only store the computed constants. */
3036 if (src_str
3037 && CONST_INT_P (len_rtx)
3038 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3039 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3040 CONST_CAST (char *, src_str),
3041 dest_align, false))
3043 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3044 builtin_memcpy_read_str,
3045 CONST_CAST (char *, src_str),
3046 dest_align, false, 0);
3047 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3048 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3049 return dest_mem;
3052 src_mem = get_memory_rtx (src, len);
3053 set_mem_align (src_mem, src_align);
3055 /* Copy word part most expediently. */
3056 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3057 CALL_EXPR_TAILCALL (exp)
3058 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3059 expected_align, expected_size);
3061 if (dest_addr == 0)
3063 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3064 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3066 return dest_addr;
3070 /* Expand a call EXP to the mempcpy builtin.
3071 Return NULL_RTX if we failed; the caller should emit a normal call,
3072 otherwise try to get the result in TARGET, if convenient (and in
3073 mode MODE if that's convenient). If ENDP is 0 return the
3074 destination pointer, if ENDP is 1 return the end pointer ala
3075 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3076 stpcpy. */
3078 static rtx
3079 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3081 if (!validate_arglist (exp,
3082 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3083 return NULL_RTX;
3084 else
3086 tree dest = CALL_EXPR_ARG (exp, 0);
3087 tree src = CALL_EXPR_ARG (exp, 1);
3088 tree len = CALL_EXPR_ARG (exp, 2);
3089 return expand_builtin_mempcpy_args (dest, src, len,
3090 target, mode, /*endp=*/ 1);
3094 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3095 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3096 so that this can also be called without constructing an actual CALL_EXPR.
3097 The other arguments and return value are the same as for
3098 expand_builtin_mempcpy. */
3100 static rtx
3101 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3102 rtx target, enum machine_mode mode, int endp)
3104 /* If return value is ignored, transform mempcpy into memcpy. */
3105 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3107 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3108 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3109 dest, src, len);
3110 return expand_expr (result, target, mode, EXPAND_NORMAL);
3112 else
3114 const char *src_str;
3115 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3116 unsigned int dest_align
3117 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3118 rtx dest_mem, src_mem, len_rtx;
3120 /* If either SRC or DEST is not a pointer type, don't do this
3121 operation in-line. */
3122 if (dest_align == 0 || src_align == 0)
3123 return NULL_RTX;
3125 /* If LEN is not constant, call the normal function. */
3126 if (! host_integerp (len, 1))
3127 return NULL_RTX;
3129 len_rtx = expand_normal (len);
3130 src_str = c_getstr (src);
3132 /* If SRC is a string constant and block move would be done
3133 by pieces, we can avoid loading the string from memory
3134 and only store the computed constants. */
3135 if (src_str
3136 && CONST_INT_P (len_rtx)
3137 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3138 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3139 CONST_CAST (char *, src_str),
3140 dest_align, false))
3142 dest_mem = get_memory_rtx (dest, len);
3143 set_mem_align (dest_mem, dest_align);
3144 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3145 builtin_memcpy_read_str,
3146 CONST_CAST (char *, src_str),
3147 dest_align, false, endp);
3148 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3149 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3150 return dest_mem;
3153 if (CONST_INT_P (len_rtx)
3154 && can_move_by_pieces (INTVAL (len_rtx),
3155 MIN (dest_align, src_align)))
3157 dest_mem = get_memory_rtx (dest, len);
3158 set_mem_align (dest_mem, dest_align);
3159 src_mem = get_memory_rtx (src, len);
3160 set_mem_align (src_mem, src_align);
3161 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3162 MIN (dest_align, src_align), endp);
3163 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3164 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3165 return dest_mem;
3168 return NULL_RTX;
3172 #ifndef HAVE_movstr
3173 # define HAVE_movstr 0
3174 # define CODE_FOR_movstr CODE_FOR_nothing
3175 #endif
3177 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3178 we failed; the caller should emit a normal call, otherwise try to
3179 get the result in TARGET, if convenient. If ENDP is 0 return the
3180 destination pointer, if ENDP is 1 return the end pointer ala
3181 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3182 stpcpy. */
3184 static rtx
3185 expand_movstr (tree dest, tree src, rtx target, int endp)
3187 struct expand_operand ops[3];
3188 rtx dest_mem;
3189 rtx src_mem;
3191 if (!HAVE_movstr)
3192 return NULL_RTX;
3194 dest_mem = get_memory_rtx (dest, NULL);
3195 src_mem = get_memory_rtx (src, NULL);
3196 if (!endp)
3198 target = force_reg (Pmode, XEXP (dest_mem, 0));
3199 dest_mem = replace_equiv_address (dest_mem, target);
3202 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3203 create_fixed_operand (&ops[1], dest_mem);
3204 create_fixed_operand (&ops[2], src_mem);
3205 expand_insn (CODE_FOR_movstr, 3, ops);
3207 if (endp && target != const0_rtx)
3209 target = ops[0].value;
3210 /* movstr is supposed to set end to the address of the NUL
3211 terminator. If the caller requested a mempcpy-like return value,
3212 adjust it. */
3213 if (endp == 1)
3215 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3216 emit_move_insn (target, force_operand (tem, NULL_RTX));
3219 return target;
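/* Example of the ENDP adjustment (illustrative, not in the original source):
   movstr leaves a pointer to the NUL terminator in the output operand, so
   for the mempcpy-style case (ENDP == 1) the code above adds 1 to produce
   the "one past the NUL" value, while the stpcpy-style case (ENDP == 2)
   can use the movstr result unchanged.  */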
3222 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3223 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3224 try to get the result in TARGET, if convenient (and in mode MODE if that's
3225 convenient). */
3227 static rtx
3228 expand_builtin_strcpy (tree exp, rtx target)
3230 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3232 tree dest = CALL_EXPR_ARG (exp, 0);
3233 tree src = CALL_EXPR_ARG (exp, 1);
3234 return expand_builtin_strcpy_args (dest, src, target);
3236 return NULL_RTX;
3239 /* Helper function to do the actual work for expand_builtin_strcpy. The
3240 arguments to the builtin_strcpy call DEST and SRC are broken out
3241 so that this can also be called without constructing an actual CALL_EXPR.
3242 The other arguments and return value are the same as for
3243 expand_builtin_strcpy. */
3245 static rtx
3246 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3248 return expand_movstr (dest, src, target, /*endp=*/0);
3251 /* Expand a call EXP to the stpcpy builtin.
3252 Return NULL_RTX if we failed; the caller should emit a normal call,
3253 otherwise try to get the result in TARGET, if convenient (and in
3254 mode MODE if that's convenient). */
3256 static rtx
3257 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3259 tree dst, src;
3260 location_t loc = EXPR_LOCATION (exp);
3262 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3263 return NULL_RTX;
3265 dst = CALL_EXPR_ARG (exp, 0);
3266 src = CALL_EXPR_ARG (exp, 1);
3268 /* If return value is ignored, transform stpcpy into strcpy. */
3269 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3271 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3272 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3273 return expand_expr (result, target, mode, EXPAND_NORMAL);
3275 else
3277 tree len, lenp1;
3278 rtx ret;
3280 /* Ensure we get an actual string whose length can be evaluated at
3281 compile-time, not an expression containing a string. This is
3282 because the latter will potentially produce pessimized code
3283 when used to produce the return value. */
3284 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3285 return expand_movstr (dst, src, target, /*endp=*/2);
3287 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3288 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3289 target, mode, /*endp=*/2);
3291 if (ret)
3292 return ret;
3294 if (TREE_CODE (len) == INTEGER_CST)
3296 rtx len_rtx = expand_normal (len);
3298 if (CONST_INT_P (len_rtx))
3300 ret = expand_builtin_strcpy_args (dst, src, target);
3302 if (ret)
3304 if (! target)
3306 if (mode != VOIDmode)
3307 target = gen_reg_rtx (mode);
3308 else
3309 target = gen_reg_rtx (GET_MODE (ret));
3311 if (GET_MODE (target) != GET_MODE (ret))
3312 ret = gen_lowpart (GET_MODE (target), ret);
3314 ret = plus_constant (ret, INTVAL (len_rtx));
3315 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3316 gcc_assert (ret);
3318 return target;
3323 return expand_movstr (dst, src, target, /*endp=*/2);
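/* Behavioural sketch (illustrative, not in the original source): for
   stpcpy (d, "abc") the source length is known, so the call is rewritten as
   a mempcpy of 4 bytes with ENDP == 2, yielding d + 3, i.e. a pointer to
   the copied NUL terminator, which is exactly what stpcpy must return.  */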
3327 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3328 bytes from constant string DATA + OFFSET and return it as target
3329 constant. */
3331 static rtx
3332 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3333 enum machine_mode mode)
3335 const char *str = (const char *) data;
3337 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3338 return const0_rtx;
3340 return c_readstr (str + offset, mode);
3343 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3344 NULL_RTX if we failed; the caller should emit a normal call. */
3346 static rtx
3347 expand_builtin_strncpy (tree exp, rtx target)
3349 location_t loc = EXPR_LOCATION (exp);
3351 if (validate_arglist (exp,
3352 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3354 tree dest = CALL_EXPR_ARG (exp, 0);
3355 tree src = CALL_EXPR_ARG (exp, 1);
3356 tree len = CALL_EXPR_ARG (exp, 2);
3357 tree slen = c_strlen (src, 1);
3359 /* We must be passed a constant len and src parameter. */
3360 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3361 return NULL_RTX;
3363 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3365 /* We're required to pad with trailing zeros if the requested
3366 len is greater than strlen(s2)+1. In that case try to
3367 use store_by_pieces; if it fails, punt. */
3368 if (tree_int_cst_lt (slen, len))
3370 unsigned int dest_align
3371 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3372 const char *p = c_getstr (src);
3373 rtx dest_mem;
3375 if (!p || dest_align == 0 || !host_integerp (len, 1)
3376 || !can_store_by_pieces (tree_low_cst (len, 1),
3377 builtin_strncpy_read_str,
3378 CONST_CAST (char *, p),
3379 dest_align, false))
3380 return NULL_RTX;
3382 dest_mem = get_memory_rtx (dest, len);
3383 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3384 builtin_strncpy_read_str,
3385 CONST_CAST (char *, p), dest_align, false, 0);
3386 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3387 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3388 return dest_mem;
3391 return NULL_RTX;
3394 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3395 bytes from constant string DATA + OFFSET and return it as target
3396 constant. */
3398 static rtx
3399 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3400 enum machine_mode mode)
3402 const char *c = (const char *) data;
3403 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3405 memset (p, *c, GET_MODE_SIZE (mode));
3407 return c_readstr (p, mode);
3410 /* Callback routine for store_by_pieces. Return the RTL of a register
3411 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3412 char value given in the RTL register data. For example, if mode is
3413 4 bytes wide, return the RTL for 0x01010101*data. */
3415 static rtx
3416 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3417 enum machine_mode mode)
3419 rtx target, coeff;
3420 size_t size;
3421 char *p;
3423 size = GET_MODE_SIZE (mode);
3424 if (size == 1)
3425 return (rtx) data;
3427 p = XALLOCAVEC (char, size);
3428 memset (p, 1, size);
3429 coeff = c_readstr (p, mode);
3431 target = convert_to_mode (mode, (rtx) data, 1);
3432 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3433 return force_reg (mode, target);
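/* Worked example (illustrative, not in the original source): for a 4-byte
   mode and a register holding the byte value 0x41, the multiplication by
   the 0x01010101 coefficient built above yields 0x41414141, i.e. the byte
   replicated across the whole word.  */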
3436 /* Expand expression EXP, which is a call to the memset builtin. Return
3437 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3438 try to get the result in TARGET, if convenient (and in mode MODE if that's
3439 convenient). */
3441 static rtx
3442 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3444 if (!validate_arglist (exp,
3445 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3446 return NULL_RTX;
3447 else
3449 tree dest = CALL_EXPR_ARG (exp, 0);
3450 tree val = CALL_EXPR_ARG (exp, 1);
3451 tree len = CALL_EXPR_ARG (exp, 2);
3452 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3456 /* Helper function to do the actual work for expand_builtin_memset. The
3457 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3458 so that this can also be called without constructing an actual CALL_EXPR.
3459 The other arguments and return value are the same as for
3460 expand_builtin_memset. */
3462 static rtx
3463 expand_builtin_memset_args (tree dest, tree val, tree len,
3464 rtx target, enum machine_mode mode, tree orig_exp)
3466 tree fndecl, fn;
3467 enum built_in_function fcode;
3468 enum machine_mode val_mode;
3469 char c;
3470 unsigned int dest_align;
3471 rtx dest_mem, dest_addr, len_rtx;
3472 HOST_WIDE_INT expected_size = -1;
3473 unsigned int expected_align = 0;
3475 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3477 /* If DEST is not a pointer type, don't do this operation in-line. */
3478 if (dest_align == 0)
3479 return NULL_RTX;
3481 if (currently_expanding_gimple_stmt)
3482 stringop_block_profile (currently_expanding_gimple_stmt,
3483 &expected_align, &expected_size);
3485 if (expected_align < dest_align)
3486 expected_align = dest_align;
3488 /* If the LEN parameter is zero, return DEST. */
3489 if (integer_zerop (len))
3491 /* Evaluate and ignore VAL in case it has side-effects. */
3492 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3493 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3496 /* Stabilize the arguments in case we fail. */
3497 dest = builtin_save_expr (dest);
3498 val = builtin_save_expr (val);
3499 len = builtin_save_expr (len);
3501 len_rtx = expand_normal (len);
3502 dest_mem = get_memory_rtx (dest, len);
3503 val_mode = TYPE_MODE (unsigned_char_type_node);
3505 if (TREE_CODE (val) != INTEGER_CST)
3507 rtx val_rtx;
3509 val_rtx = expand_normal (val);
3510 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3512 /* Assume that we can memset by pieces if we can store
3513 * the coefficients by pieces (in the required modes).
3514 * We can't pass builtin_memset_gen_str as that emits RTL. */
3515 c = 1;
3516 if (host_integerp (len, 1)
3517 && can_store_by_pieces (tree_low_cst (len, 1),
3518 builtin_memset_read_str, &c, dest_align,
3519 true))
3521 val_rtx = force_reg (val_mode, val_rtx);
3522 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3523 builtin_memset_gen_str, val_rtx, dest_align,
3524 true, 0);
3526 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3527 dest_align, expected_align,
3528 expected_size))
3529 goto do_libcall;
3531 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3532 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3533 return dest_mem;
3536 if (target_char_cast (val, &c))
3537 goto do_libcall;
3539 if (c)
3541 if (host_integerp (len, 1)
3542 && can_store_by_pieces (tree_low_cst (len, 1),
3543 builtin_memset_read_str, &c, dest_align,
3544 true))
3545 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3546 builtin_memset_read_str, &c, dest_align, true, 0);
3547 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3548 gen_int_mode (c, val_mode),
3549 dest_align, expected_align,
3550 expected_size))
3551 goto do_libcall;
3553 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3554 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3555 return dest_mem;
3558 set_mem_align (dest_mem, dest_align);
3559 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3560 CALL_EXPR_TAILCALL (orig_exp)
3561 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3562 expected_align, expected_size);
3564 if (dest_addr == 0)
3566 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3567 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3570 return dest_addr;
3572 do_libcall:
3573 fndecl = get_callee_fndecl (orig_exp);
3574 fcode = DECL_FUNCTION_CODE (fndecl);
3575 if (fcode == BUILT_IN_MEMSET)
3576 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3577 dest, val, len);
3578 else if (fcode == BUILT_IN_BZERO)
3579 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3580 dest, len);
3581 else
3582 gcc_unreachable ();
3583 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3584 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3585 return expand_call (fn, target, target == const0_rtx);
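/* Worked example, illustrative only: with a nonzero constant fill byte and a
   small constant length, e.g.

     char buf[16];
     __builtin_memset (buf, 0xff, sizeof buf);

   target_char_cast yields c == 0xff and, if can_store_by_pieces agrees for
   the destination alignment, the stores are emitted directly.  A zero fill
   byte instead goes through clear_storage_hints, and when neither the
   by-pieces path nor the setmem pattern applies, do_libcall above rebuilds
   an ordinary call to memset or bzero.  */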
3588 /* Expand expression EXP, which is a call to the bzero builtin. Return
3589 NULL_RTX if we failed; the caller should then emit a normal call. */
3591 static rtx
3592 expand_builtin_bzero (tree exp)
3594 tree dest, size;
3595 location_t loc = EXPR_LOCATION (exp);
3597 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3598 return NULL_RTX;
3600 dest = CALL_EXPR_ARG (exp, 0);
3601 size = CALL_EXPR_ARG (exp, 1);
3603 /* New argument list transforming bzero(ptr x, int y) to
3604 memset(ptr x, int 0, size_t y). This is done this way
3605 so that if it isn't expanded inline, we fall back to
3606 calling bzero instead of memset. */
3608 return expand_builtin_memset_args (dest, integer_zero_node,
3609 fold_convert_loc (loc, sizetype, size),
3610 const0_rtx, VOIDmode, exp);
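/* Illustrative equivalence, not in the original source: the argument rewrite
   above means that

     bzero (p, n);

   is expanded exactly like

     memset (p, 0, (size_t) n);

   while any fallback library call still goes to bzero itself, as the comment
   above explains.  */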
3613 /* Expand expression EXP, which is a call to the memcmp built-in function.
3614 Return NULL_RTX if we failed and the
3615 caller should emit a normal call, otherwise try to get the result in
3616 TARGET, if convenient (and in mode MODE, if that's convenient). */
3618 static rtx
3619 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3620 ATTRIBUTE_UNUSED enum machine_mode mode)
3622 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3624 if (!validate_arglist (exp,
3625 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3626 return NULL_RTX;
3628 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3630 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3631 rtx result;
3632 rtx insn;
3633 tree arg1 = CALL_EXPR_ARG (exp, 0);
3634 tree arg2 = CALL_EXPR_ARG (exp, 1);
3635 tree len = CALL_EXPR_ARG (exp, 2);
3637 unsigned int arg1_align
3638 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3639 unsigned int arg2_align
3640 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3641 enum machine_mode insn_mode;
3643 #ifdef HAVE_cmpmemsi
3644 if (HAVE_cmpmemsi)
3645 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3646 else
3647 #endif
3648 #ifdef HAVE_cmpstrnsi
3649 if (HAVE_cmpstrnsi)
3650 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3651 else
3652 #endif
3653 return NULL_RTX;
3655 /* If we don't have POINTER_TYPE, call the function. */
3656 if (arg1_align == 0 || arg2_align == 0)
3657 return NULL_RTX;
3659 /* Make a place to write the result of the instruction. */
3660 result = target;
3661 if (! (result != 0
3662 && REG_P (result) && GET_MODE (result) == insn_mode
3663 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3664 result = gen_reg_rtx (insn_mode);
3666 arg1_rtx = get_memory_rtx (arg1, len);
3667 arg2_rtx = get_memory_rtx (arg2, len);
3668 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3670 /* Set MEM_SIZE as appropriate. */
3671 if (CONST_INT_P (arg3_rtx))
3673 set_mem_size (arg1_rtx, arg3_rtx);
3674 set_mem_size (arg2_rtx, arg3_rtx);
3677 #ifdef HAVE_cmpmemsi
3678 if (HAVE_cmpmemsi)
3679 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3680 GEN_INT (MIN (arg1_align, arg2_align)));
3681 else
3682 #endif
3683 #ifdef HAVE_cmpstrnsi
3684 if (HAVE_cmpstrnsi)
3685 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3686 GEN_INT (MIN (arg1_align, arg2_align)));
3687 else
3688 #endif
3689 gcc_unreachable ();
3691 if (insn)
3692 emit_insn (insn);
3693 else
3694 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3695 TYPE_MODE (integer_type_node), 3,
3696 XEXP (arg1_rtx, 0), Pmode,
3697 XEXP (arg2_rtx, 0), Pmode,
3698 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3699 TYPE_UNSIGNED (sizetype)),
3700 TYPE_MODE (sizetype));
3702 /* Return the value in the proper mode for this function. */
3703 mode = TYPE_MODE (TREE_TYPE (exp));
3704 if (GET_MODE (result) == mode)
3705 return result;
3706 else if (target != 0)
3708 convert_move (target, result, 0);
3709 return target;
3711 else
3712 return convert_to_mode (mode, result, 0);
3714 #endif
3716 return NULL_RTX;
3719 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3720 if we failed; the caller should then emit a normal call. Otherwise try to get
3721 the result in TARGET, if convenient. */
3723 static rtx
3724 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3726 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3727 return NULL_RTX;
3729 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3730 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3731 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3733 rtx arg1_rtx, arg2_rtx;
3734 rtx result, insn = NULL_RTX;
3735 tree fndecl, fn;
3736 tree arg1 = CALL_EXPR_ARG (exp, 0);
3737 tree arg2 = CALL_EXPR_ARG (exp, 1);
3739 unsigned int arg1_align
3740 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3741 unsigned int arg2_align
3742 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3744 /* If we don't have POINTER_TYPE, call the function. */
3745 if (arg1_align == 0 || arg2_align == 0)
3746 return NULL_RTX;
3748 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3749 arg1 = builtin_save_expr (arg1);
3750 arg2 = builtin_save_expr (arg2);
3752 arg1_rtx = get_memory_rtx (arg1, NULL);
3753 arg2_rtx = get_memory_rtx (arg2, NULL);
3755 #ifdef HAVE_cmpstrsi
3756 /* Try to call cmpstrsi. */
3757 if (HAVE_cmpstrsi)
3759 enum machine_mode insn_mode
3760 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3762 /* Make a place to write the result of the instruction. */
3763 result = target;
3764 if (! (result != 0
3765 && REG_P (result) && GET_MODE (result) == insn_mode
3766 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3767 result = gen_reg_rtx (insn_mode);
3769 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3770 GEN_INT (MIN (arg1_align, arg2_align)));
3772 #endif
3773 #ifdef HAVE_cmpstrnsi
3774 /* Try to determine at least one length and call cmpstrnsi. */
3775 if (!insn && HAVE_cmpstrnsi)
3777 tree len;
3778 rtx arg3_rtx;
3780 enum machine_mode insn_mode
3781 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3782 tree len1 = c_strlen (arg1, 1);
3783 tree len2 = c_strlen (arg2, 1);
3785 if (len1)
3786 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3787 if (len2)
3788 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3790 /* If we don't have a constant length for the first, use the length
3791 of the second, if we know it. We don't require a constant for
3792 this case; some cost analysis could be done if both are available
3793 but neither is constant. For now, assume they're equally cheap,
3794 unless one has side effects. If both strings have constant lengths,
3795 use the smaller. */
3797 if (!len1)
3798 len = len2;
3799 else if (!len2)
3800 len = len1;
3801 else if (TREE_SIDE_EFFECTS (len1))
3802 len = len2;
3803 else if (TREE_SIDE_EFFECTS (len2))
3804 len = len1;
3805 else if (TREE_CODE (len1) != INTEGER_CST)
3806 len = len2;
3807 else if (TREE_CODE (len2) != INTEGER_CST)
3808 len = len1;
3809 else if (tree_int_cst_lt (len1, len2))
3810 len = len1;
3811 else
3812 len = len2;
3814 /* If both arguments have side effects, we cannot optimize. */
3815 if (!len || TREE_SIDE_EFFECTS (len))
3816 goto do_libcall;
3818 arg3_rtx = expand_normal (len);
3820 /* Make a place to write the result of the instruction. */
3821 result = target;
3822 if (! (result != 0
3823 && REG_P (result) && GET_MODE (result) == insn_mode
3824 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3825 result = gen_reg_rtx (insn_mode);
3827 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3828 GEN_INT (MIN (arg1_align, arg2_align)));
3830 #endif
3832 if (insn)
3834 enum machine_mode mode;
3835 emit_insn (insn);
3837 /* Return the value in the proper mode for this function. */
3838 mode = TYPE_MODE (TREE_TYPE (exp));
3839 if (GET_MODE (result) == mode)
3840 return result;
3841 if (target == 0)
3842 return convert_to_mode (mode, result, 0);
3843 convert_move (target, result, 0);
3844 return target;
3847 /* Expand the library call ourselves using a stabilized argument
3848 list to avoid re-evaluating the function's arguments twice. */
3849 #ifdef HAVE_cmpstrnsi
3850 do_libcall:
3851 #endif
3852 fndecl = get_callee_fndecl (exp);
3853 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3854 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3855 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3856 return expand_call (fn, target, target == const0_rtx);
3858 #endif
3859 return NULL_RTX;
3862 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3863 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise try to get
3864 the result in TARGET, if convenient. */
3866 static rtx
3867 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3868 ATTRIBUTE_UNUSED enum machine_mode mode)
3870 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3872 if (!validate_arglist (exp,
3873 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3874 return NULL_RTX;
3876 /* If c_strlen can determine an expression for one of the string
3877 lengths, and it doesn't have side effects, then emit cmpstrnsi
3878 using length MIN(strlen(string)+1, arg3). */
3879 #ifdef HAVE_cmpstrnsi
3880 if (HAVE_cmpstrnsi)
3882 tree len, len1, len2;
3883 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3884 rtx result, insn;
3885 tree fndecl, fn;
3886 tree arg1 = CALL_EXPR_ARG (exp, 0);
3887 tree arg2 = CALL_EXPR_ARG (exp, 1);
3888 tree arg3 = CALL_EXPR_ARG (exp, 2);
3890 unsigned int arg1_align
3891 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3892 unsigned int arg2_align
3893 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3894 enum machine_mode insn_mode
3895 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3897 len1 = c_strlen (arg1, 1);
3898 len2 = c_strlen (arg2, 1);
3900 if (len1)
3901 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3902 if (len2)
3903 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3905 /* If we don't have a constant length for the first, use the length
3906 of the second, if we know it. We don't require a constant for
3907 this case; some cost analysis could be done if both are available
3908 but neither is constant. For now, assume they're equally cheap,
3909 unless one has side effects. If both strings have constant lengths,
3910 use the smaller. */
3912 if (!len1)
3913 len = len2;
3914 else if (!len2)
3915 len = len1;
3916 else if (TREE_SIDE_EFFECTS (len1))
3917 len = len2;
3918 else if (TREE_SIDE_EFFECTS (len2))
3919 len = len1;
3920 else if (TREE_CODE (len1) != INTEGER_CST)
3921 len = len2;
3922 else if (TREE_CODE (len2) != INTEGER_CST)
3923 len = len1;
3924 else if (tree_int_cst_lt (len1, len2))
3925 len = len1;
3926 else
3927 len = len2;
3929 /* If both arguments have side effects, we cannot optimize. */
3930 if (!len || TREE_SIDE_EFFECTS (len))
3931 return NULL_RTX;
3933 /* The actual new length parameter is MIN(len,arg3). */
3934 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3935 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3937 /* If we don't have POINTER_TYPE, call the function. */
3938 if (arg1_align == 0 || arg2_align == 0)
3939 return NULL_RTX;
3941 /* Make a place to write the result of the instruction. */
3942 result = target;
3943 if (! (result != 0
3944 && REG_P (result) && GET_MODE (result) == insn_mode
3945 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3946 result = gen_reg_rtx (insn_mode);
3948 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3949 arg1 = builtin_save_expr (arg1);
3950 arg2 = builtin_save_expr (arg2);
3951 len = builtin_save_expr (len);
3953 arg1_rtx = get_memory_rtx (arg1, len);
3954 arg2_rtx = get_memory_rtx (arg2, len);
3955 arg3_rtx = expand_normal (len);
3956 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3957 GEN_INT (MIN (arg1_align, arg2_align)));
3958 if (insn)
3960 emit_insn (insn);
3962 /* Return the value in the proper mode for this function. */
3963 mode = TYPE_MODE (TREE_TYPE (exp));
3964 if (GET_MODE (result) == mode)
3965 return result;
3966 if (target == 0)
3967 return convert_to_mode (mode, result, 0);
3968 convert_move (target, result, 0);
3969 return target;
3972 /* Expand the library call ourselves using a stabilized argument
3973 list to avoid re-evaluating the function's arguments twice. */
3974 fndecl = get_callee_fndecl (exp);
3975 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3976 arg1, arg2, len);
3977 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3978 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3979 return expand_call (fn, target, target == const0_rtx);
3981 #endif
3982 return NULL_RTX;
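/* Worked example, illustrative only: for

     strncmp (s, "abc", n)

   c_strlen gives 3 for the string literal, len2 becomes 4 after the +1
   above, and the length operand handed to cmpstrnsi is MIN (4, n), i.e. at
   most the literal's length including its terminating NUL.  */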
3985 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3986 if that's convenient. */
3988 rtx
3989 expand_builtin_saveregs (void)
3991 rtx val, seq;
3993 /* Don't do __builtin_saveregs more than once in a function.
3994 Save the result of the first call and reuse it. */
3995 if (saveregs_value != 0)
3996 return saveregs_value;
3998 /* When this function is called, it means that registers must be
3999 saved on entry to this function. So we migrate the call to the
4000 first insn of this function. */
4002 start_sequence ();
4004 /* Do whatever the machine needs done in this case. */
4005 val = targetm.calls.expand_builtin_saveregs ();
4007 seq = get_insns ();
4008 end_sequence ();
4010 saveregs_value = val;
4012 /* Put the insns after the NOTE that starts the function. If this
4013 is inside a start_sequence, make the outer-level insn chain current, so
4014 the code is placed at the start of the function. */
4015 push_topmost_sequence ();
4016 emit_insn_after (seq, entry_of_function ());
4017 pop_topmost_sequence ();
4019 return val;
4022 /* Expand a call to __builtin_next_arg. */
4024 static rtx
4025 expand_builtin_next_arg (void)
4027 /* Checking arguments is already done in fold_builtin_next_arg
4028 that must be called before this function. */
4029 return expand_binop (ptr_mode, add_optab,
4030 crtl->args.internal_arg_pointer,
4031 crtl->args.arg_offset_rtx,
4032 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4035 /* Make it easier for the backends by protecting the valist argument
4036 from multiple evaluations. */
4038 static tree
4039 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4041 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4043 /* The current way of determining the type of valist is completely
4044 bogus. We should have the information on the va builtin instead. */
4045 if (!vatype)
4046 vatype = targetm.fn_abi_va_list (cfun->decl);
4048 if (TREE_CODE (vatype) == ARRAY_TYPE)
4050 if (TREE_SIDE_EFFECTS (valist))
4051 valist = save_expr (valist);
4053 /* For this case, the backends will be expecting a pointer to
4054 vatype, but it's possible we've actually been given an array
4055 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4056 So fix it. */
4057 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4059 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4060 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4063 else
4065 tree pt = build_pointer_type (vatype);
4067 if (! needs_lvalue)
4069 if (! TREE_SIDE_EFFECTS (valist))
4070 return valist;
4072 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4073 TREE_SIDE_EFFECTS (valist) = 1;
4076 if (TREE_SIDE_EFFECTS (valist))
4077 valist = save_expr (valist);
4078 valist = fold_build2_loc (loc, MEM_REF,
4079 vatype, valist, build_int_cst (pt, 0));
4082 return valist;
4085 /* The "standard" definition of va_list is void*. */
4087 tree
4088 std_build_builtin_va_list (void)
4090 return ptr_type_node;
4093 /* The "standard" abi va_list is va_list_type_node. */
4095 tree
4096 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4098 return va_list_type_node;
4101 /* The "standard" type of va_list is va_list_type_node. */
4103 tree
4104 std_canonical_va_list_type (tree type)
4106 tree wtype, htype;
4108 if (INDIRECT_REF_P (type))
4109 type = TREE_TYPE (type);
4110 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4111 type = TREE_TYPE (type);
4112 wtype = va_list_type_node;
4113 htype = type;
4114 /* Treat structure va_list types. */
4115 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4116 htype = TREE_TYPE (htype);
4117 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4119 /* If va_list is an array type, the argument may have decayed
4120 to a pointer type, e.g. by being passed to another function.
4121 In that case, unwrap both types so that we can compare the
4122 underlying records. */
4123 if (TREE_CODE (htype) == ARRAY_TYPE
4124 || POINTER_TYPE_P (htype))
4126 wtype = TREE_TYPE (wtype);
4127 htype = TREE_TYPE (htype);
4130 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4131 return va_list_type_node;
4133 return NULL_TREE;
4136 /* The "standard" implementation of va_start: just assign `nextarg' to
4137 the variable. */
4139 void
4140 std_expand_builtin_va_start (tree valist, rtx nextarg)
4142 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4143 convert_move (va_r, nextarg, 0);
4146 /* Expand EXP, a call to __builtin_va_start. */
4148 static rtx
4149 expand_builtin_va_start (tree exp)
4151 rtx nextarg;
4152 tree valist;
4153 location_t loc = EXPR_LOCATION (exp);
4155 if (call_expr_nargs (exp) < 2)
4157 error_at (loc, "too few arguments to function %<va_start%>");
4158 return const0_rtx;
4161 if (fold_builtin_next_arg (exp, true))
4162 return const0_rtx;
4164 nextarg = expand_builtin_next_arg ();
4165 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4167 if (targetm.expand_builtin_va_start)
4168 targetm.expand_builtin_va_start (valist, nextarg);
4169 else
4170 std_expand_builtin_va_start (valist, nextarg);
4172 return const0_rtx;
4175 /* The "standard" implementation of va_arg: read the value from the
4176 current (padded) address and increment by the (padded) size. */
4178 tree
4179 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4180 gimple_seq *post_p)
4182 tree addr, t, type_size, rounded_size, valist_tmp;
4183 unsigned HOST_WIDE_INT align, boundary;
4184 bool indirect;
4186 #ifdef ARGS_GROW_DOWNWARD
4187 /* All of the alignment and movement below is for args-grow-up machines.
4188 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4189 implement their own specialized gimplify_va_arg_expr routines. */
4190 gcc_unreachable ();
4191 #endif
4193 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4194 if (indirect)
4195 type = build_pointer_type (type);
4197 align = PARM_BOUNDARY / BITS_PER_UNIT;
4198 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4200 /* When the caller aligns a parameter on the stack, an alignment that
4201 exceeds MAX_SUPPORTED_STACK_ALIGNMENT is clamped to
4202 MAX_SUPPORTED_STACK_ALIGNMENT, so assume the same limit here in the
4203 callee to stay in sync with the caller. */
4204 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4205 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4207 boundary /= BITS_PER_UNIT;
4209 /* Hoist the valist value into a temporary for the moment. */
4210 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4212 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4213 requires greater alignment, we must perform dynamic alignment. */
4214 if (boundary > align
4215 && !integer_zerop (TYPE_SIZE (type)))
4217 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4218 fold_build2 (POINTER_PLUS_EXPR,
4219 TREE_TYPE (valist),
4220 valist_tmp, size_int (boundary - 1)));
4221 gimplify_and_add (t, pre_p);
4223 t = fold_convert (sizetype, valist_tmp);
4224 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4225 fold_convert (TREE_TYPE (valist),
4226 fold_build2 (BIT_AND_EXPR, sizetype, t,
4227 size_int (-boundary))));
4228 gimplify_and_add (t, pre_p);
4230 else
4231 boundary = align;
4233 /* If the actual alignment is less than the alignment of the type,
4234 adjust the type accordingly so that we don't assume strict alignment
4235 when dereferencing the pointer. */
4236 boundary *= BITS_PER_UNIT;
4237 if (boundary < TYPE_ALIGN (type))
4239 type = build_variant_type_copy (type);
4240 TYPE_ALIGN (type) = boundary;
4243 /* Compute the rounded size of the type. */
4244 type_size = size_in_bytes (type);
4245 rounded_size = round_up (type_size, align);
4247 /* Reduce rounded_size so it's sharable with the postqueue. */
4248 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4250 /* Get AP. */
4251 addr = valist_tmp;
4252 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4254 /* Small args are padded downward. */
4255 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4256 rounded_size, size_int (align));
4257 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4258 size_binop (MINUS_EXPR, rounded_size, type_size));
4259 addr = fold_build2 (POINTER_PLUS_EXPR,
4260 TREE_TYPE (addr), addr, t);
4263 /* Compute new value for AP. */
4264 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4265 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4266 gimplify_and_add (t, pre_p);
4268 addr = fold_convert (build_pointer_type (type), addr);
4270 if (indirect)
4271 addr = build_va_arg_indirect_ref (addr);
4273 return build_va_arg_indirect_ref (addr);
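/* Rough C-level sketch of the standard expansion above, illustrative only
   and assuming an args-grow-up target:

     tmp = ap;
     if (boundary > align)                        dynamic realignment
       tmp = (tmp + boundary - 1) & -boundary;
     addr = tmp;
     ap   = tmp + ROUND_UP (sizeof (type), align);
     result = *(type *) addr;

   The GT_EXPR/COND_EXPR adjustment of ADDR implements the PAD_VARARGS_DOWN
   case, where a small argument is read from the top of its rounded slot.  */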
4276 /* Build an indirect-ref expression over the given TREE, which represents a
4277 piece of a va_arg() expansion. */
4278 tree
4279 build_va_arg_indirect_ref (tree addr)
4281 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4283 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4284 mf_mark (addr);
4286 return addr;
4289 /* Return a dummy expression of type TYPE in order to keep going after an
4290 error. */
4292 static tree
4293 dummy_object (tree type)
4295 tree t = build_int_cst (build_pointer_type (type), 0);
4296 return build2 (MEM_REF, type, t, t);
4299 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4300 builtin function, but a very special sort of operator. */
4302 enum gimplify_status
4303 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4305 tree promoted_type, have_va_type;
4306 tree valist = TREE_OPERAND (*expr_p, 0);
4307 tree type = TREE_TYPE (*expr_p);
4308 tree t;
4309 location_t loc = EXPR_LOCATION (*expr_p);
4311 /* Verify that valist is of the proper type. */
4312 have_va_type = TREE_TYPE (valist);
4313 if (have_va_type == error_mark_node)
4314 return GS_ERROR;
4315 have_va_type = targetm.canonical_va_list_type (have_va_type);
4317 if (have_va_type == NULL_TREE)
4319 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4320 return GS_ERROR;
4323 /* Generate a diagnostic for requesting data of a type that cannot
4324 be passed through `...' due to type promotion at the call site. */
4325 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4326 != type)
4328 static bool gave_help;
4329 bool warned;
4331 /* Unfortunately, this is merely undefined, rather than a constraint
4332 violation, so we cannot make this an error. If this call is never
4333 executed, the program is still strictly conforming. */
4334 warned = warning_at (loc, 0,
4335 "%qT is promoted to %qT when passed through %<...%>",
4336 type, promoted_type);
4337 if (!gave_help && warned)
4339 gave_help = true;
4340 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4341 promoted_type, type);
4344 /* We can, however, treat "undefined" any way we please.
4345 Call abort to encourage the user to fix the program. */
4346 if (warned)
4347 inform (loc, "if this code is reached, the program will abort");
4348 /* Before the abort, allow the evaluation of the va_list
4349 expression to exit or longjmp. */
4350 gimplify_and_add (valist, pre_p);
4351 t = build_call_expr_loc (loc,
4352 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4353 gimplify_and_add (t, pre_p);
4355 /* This is dead code, but go ahead and finish so that the
4356 mode of the result comes out right. */
4357 *expr_p = dummy_object (type);
4358 return GS_ALL_DONE;
4360 else
4362 /* Make it easier for the backends by protecting the valist argument
4363 from multiple evaluations. */
4364 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4366 /* For this case, the backends will be expecting a pointer to
4367 TREE_TYPE (abi), but it's possible we've
4368 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4369 So fix it. */
4370 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4372 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4373 valist = fold_convert_loc (loc, p1,
4374 build_fold_addr_expr_loc (loc, valist));
4377 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4379 else
4380 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4382 if (!targetm.gimplify_va_arg_expr)
4383 /* FIXME: Once most targets are converted we should merely
4384 assert this is non-null. */
4385 return GS_ALL_DONE;
4387 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4388 return GS_OK;
4392 /* Expand EXP, a call to __builtin_va_end. */
4394 static rtx
4395 expand_builtin_va_end (tree exp)
4397 tree valist = CALL_EXPR_ARG (exp, 0);
4399 /* Evaluate for side effects, if needed. I hate macros that don't
4400 do that. */
4401 if (TREE_SIDE_EFFECTS (valist))
4402 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4404 return const0_rtx;
4407 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4408 builtin rather than just as an assignment in stdarg.h because of the
4409 nastiness of array-type va_list types. */
4411 static rtx
4412 expand_builtin_va_copy (tree exp)
4414 tree dst, src, t;
4415 location_t loc = EXPR_LOCATION (exp);
4417 dst = CALL_EXPR_ARG (exp, 0);
4418 src = CALL_EXPR_ARG (exp, 1);
4420 dst = stabilize_va_list_loc (loc, dst, 1);
4421 src = stabilize_va_list_loc (loc, src, 0);
4423 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4425 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4427 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4428 TREE_SIDE_EFFECTS (t) = 1;
4429 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4431 else
4433 rtx dstb, srcb, size;
4435 /* Evaluate to pointers. */
4436 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4437 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4438 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4439 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4441 dstb = convert_memory_address (Pmode, dstb);
4442 srcb = convert_memory_address (Pmode, srcb);
4444 /* "Dereference" to BLKmode memories. */
4445 dstb = gen_rtx_MEM (BLKmode, dstb);
4446 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4447 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4448 srcb = gen_rtx_MEM (BLKmode, srcb);
4449 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4450 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4452 /* Copy. */
4453 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4456 return const0_rtx;
4459 /* Expand a call to one of the builtin functions __builtin_frame_address or
4460 __builtin_return_address. */
4462 static rtx
4463 expand_builtin_frame_address (tree fndecl, tree exp)
4465 /* The argument must be a nonnegative integer constant.
4466 It counts the number of frames to scan up the stack.
4467 The value is the frame address or return address saved in that frame. */
4468 if (call_expr_nargs (exp) == 0)
4469 /* Warning about missing arg was already issued. */
4470 return const0_rtx;
4471 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4473 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4474 error ("invalid argument to %<__builtin_frame_address%>");
4475 else
4476 error ("invalid argument to %<__builtin_return_address%>");
4477 return const0_rtx;
4479 else
4481 rtx tem
4482 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4483 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4485 /* Some ports cannot access arbitrary stack frames. */
4486 if (tem == NULL)
4488 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4489 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4490 else
4491 warning (0, "unsupported argument to %<__builtin_return_address%>");
4492 return const0_rtx;
4495 /* For __builtin_frame_address, return what we've got. */
4496 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4497 return tem;
4499 if (!REG_P (tem)
4500 && ! CONSTANT_P (tem))
4501 tem = copy_to_mode_reg (Pmode, tem);
4502 return tem;
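/* Usage note, illustrative only: the argument must be a nonnegative integer
   constant, e.g.

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (0);

   which yield the current function's return address and frame address.  A
   nonzero level asks for an outer frame, which some ports cannot provide;
   that is the NULL case warned about above.  */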
4506 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4507 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4508 is the same as for allocate_dynamic_stack_space. */
4510 static rtx
4511 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4513 rtx op0;
4514 rtx result;
4516 /* Emit normal call if marked not-inlineable. */
4517 if (CALL_CANNOT_INLINE_P (exp))
4518 return NULL_RTX;
4520 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4521 return NULL_RTX;
4523 /* Compute the argument. */
4524 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4526 /* Allocate the desired space. */
4527 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4528 cannot_accumulate);
4529 result = convert_memory_address (ptr_mode, result);
4531 return result;
4534 /* Expand EXP, a call to a bswap builtin. Place the result in TARGET if
4535 convenient; SUBTARGET may be used for computing the operand. */
4537 static rtx
4538 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4540 enum machine_mode mode;
4541 tree arg;
4542 rtx op0;
4544 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4545 return NULL_RTX;
4547 arg = CALL_EXPR_ARG (exp, 0);
4548 mode = TYPE_MODE (TREE_TYPE (arg));
4549 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4551 target = expand_unop (mode, bswap_optab, op0, target, 1);
4553 gcc_assert (target);
4555 return convert_to_mode (mode, target, 0);
4558 /* Expand a call to a unary builtin in EXP.
4559 Return NULL_RTX if a normal call should be emitted rather than expanding the
4560 function in-line. If convenient, the result should be placed in TARGET.
4561 SUBTARGET may be used as the target for computing one of EXP's operands. */
4563 static rtx
4564 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4565 rtx subtarget, optab op_optab)
4567 rtx op0;
4569 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4570 return NULL_RTX;
4572 /* Compute the argument. */
4573 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4574 (subtarget
4575 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4576 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4577 VOIDmode, EXPAND_NORMAL);
4578 /* Compute op, into TARGET if possible.
4579 Set TARGET to wherever the result comes back. */
4580 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4581 op_optab, op0, target, 1);
4582 gcc_assert (target);
4584 return convert_to_mode (target_mode, target, 0);
4587 /* Expand a call to __builtin_expect. We just return our argument
4588 as the builtin_expect semantics should already have been handled by
4589 the tree branch prediction pass. */
4591 static rtx
4592 expand_builtin_expect (tree exp, rtx target)
4594 tree arg;
4596 if (call_expr_nargs (exp) < 2)
4597 return const0_rtx;
4598 arg = CALL_EXPR_ARG (exp, 0);
4600 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4601 /* When guessing was done, the hints should be already stripped away. */
4602 gcc_assert (!flag_guess_branch_prob
4603 || optimize == 0 || seen_error ());
4604 return target;
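/* Usage example, illustrative only (handle_rare_error is hypothetical):

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();

   By expansion time the branch-prediction pass has already consumed the
   hint, so the code above simply returns the first argument.  */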
4607 void
4608 expand_builtin_trap (void)
4610 #ifdef HAVE_trap
4611 if (HAVE_trap)
4612 emit_insn (gen_trap ());
4613 else
4614 #endif
4615 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4616 emit_barrier ();
4619 /* Expand a call to __builtin_unreachable. We do nothing except emit
4620 a barrier saying that control flow will not pass here.
4622 It is the responsibility of the program being compiled to ensure
4623 that control flow never reaches __builtin_unreachable. */
4624 static void
4625 expand_builtin_unreachable (void)
4627 emit_barrier ();
4630 /* Expand EXP, a call to fabs, fabsf or fabsl.
4631 Return NULL_RTX if a normal call should be emitted rather than expanding
4632 the function inline. If convenient, the result should be placed
4633 in TARGET. SUBTARGET may be used as the target for computing
4634 the operand. */
4636 static rtx
4637 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4639 enum machine_mode mode;
4640 tree arg;
4641 rtx op0;
4643 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4644 return NULL_RTX;
4646 arg = CALL_EXPR_ARG (exp, 0);
4647 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4648 mode = TYPE_MODE (TREE_TYPE (arg));
4649 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4650 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4653 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4654 Return NULL if a normal call should be emitted rather than expanding the
4655 function inline. If convenient, the result should be placed in TARGET.
4656 SUBTARGET may be used as the target for computing the operand. */
4658 static rtx
4659 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4661 rtx op0, op1;
4662 tree arg;
4664 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4665 return NULL_RTX;
4667 arg = CALL_EXPR_ARG (exp, 0);
4668 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4670 arg = CALL_EXPR_ARG (exp, 1);
4671 op1 = expand_normal (arg);
4673 return expand_copysign (op0, op1, target);
4676 /* Create a new constant string literal and return a char* pointer to it.
4677 The STRING_CST value is the LEN characters at STR. */
4678 tree
4679 build_string_literal (int len, const char *str)
4681 tree t, elem, index, type;
4683 t = build_string (len, str);
4684 elem = build_type_variant (char_type_node, 1, 0);
4685 index = build_index_type (size_int (len - 1));
4686 type = build_array_type (elem, index);
4687 TREE_TYPE (t) = type;
4688 TREE_CONSTANT (t) = 1;
4689 TREE_READONLY (t) = 1;
4690 TREE_STATIC (t) = 1;
4692 type = build_pointer_type (elem);
4693 t = build1 (ADDR_EXPR, type,
4694 build4 (ARRAY_REF, elem,
4695 t, integer_zero_node, NULL_TREE, NULL_TREE));
4696 return t;
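/* Illustrative note, not in the original source: the tree built above is the
   address of element zero of a constant array, roughly the C expression

     &"some text"[0]

   usable wherever a char* argument pointing at the LEN characters of STR is
   needed.  */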
4699 /* Expand a call to __builtin___clear_cache. */
4701 static rtx
4702 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4704 #ifndef HAVE_clear_cache
4705 #ifdef CLEAR_INSN_CACHE
4706 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4707 does something. Just do the default expansion to a call to
4708 __clear_cache(). */
4709 return NULL_RTX;
4710 #else
4711 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4712 does nothing. There is no need to call it. Do nothing. */
4713 return const0_rtx;
4714 #endif /* CLEAR_INSN_CACHE */
4715 #else
4716 /* We have a "clear_cache" insn, and it will handle everything. */
4717 tree begin, end;
4718 rtx begin_rtx, end_rtx;
4720 /* We must not expand to a library call. If we did, any
4721 fallback library function in libgcc that might contain a call to
4722 __builtin___clear_cache() would recurse infinitely. */
4723 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4725 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4726 return const0_rtx;
4729 if (HAVE_clear_cache)
4731 struct expand_operand ops[2];
4733 begin = CALL_EXPR_ARG (exp, 0);
4734 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4736 end = CALL_EXPR_ARG (exp, 1);
4737 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4739 create_address_operand (&ops[0], begin_rtx);
4740 create_address_operand (&ops[1], end_rtx);
4741 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4742 return const0_rtx;
4744 return const0_rtx;
4745 #endif /* HAVE_clear_cache */
4748 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4750 static rtx
4751 round_trampoline_addr (rtx tramp)
4753 rtx temp, addend, mask;
4755 /* If we don't need too much alignment, we'll have been guaranteed
4756 proper alignment by get_trampoline_type. */
4757 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4758 return tramp;
4760 /* Round address up to desired boundary. */
4761 temp = gen_reg_rtx (Pmode);
4762 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4763 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4765 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4766 temp, 0, OPTAB_LIB_WIDEN);
4767 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4768 temp, 0, OPTAB_LIB_WIDEN);
4770 return tramp;
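/* Worked example, illustrative only: with a TRAMPOLINE_ALIGNMENT of 64 bits
   the two binops above compute

     tramp = (tramp + 7) & -8;

   the usual round-up-to-alignment idiom, carried out in Pmode.  */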
4773 static rtx
4774 expand_builtin_init_trampoline (tree exp)
4776 tree t_tramp, t_func, t_chain;
4777 rtx m_tramp, r_tramp, r_chain, tmp;
4779 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4780 POINTER_TYPE, VOID_TYPE))
4781 return NULL_RTX;
4783 t_tramp = CALL_EXPR_ARG (exp, 0);
4784 t_func = CALL_EXPR_ARG (exp, 1);
4785 t_chain = CALL_EXPR_ARG (exp, 2);
4787 r_tramp = expand_normal (t_tramp);
4788 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4789 MEM_NOTRAP_P (m_tramp) = 1;
4791 /* The TRAMP argument should be the address of a field within the
4792 local function's FRAME decl. Let's see if we can fill in the
4793 MEM_ATTRs for this memory. */
4794 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4795 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4796 true, 0);
4798 tmp = round_trampoline_addr (r_tramp);
4799 if (tmp != r_tramp)
4801 m_tramp = change_address (m_tramp, BLKmode, tmp);
4802 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4803 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
4806 /* The FUNC argument should be the address of the nested function.
4807 Extract the actual function decl to pass to the hook. */
4808 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4809 t_func = TREE_OPERAND (t_func, 0);
4810 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4812 r_chain = expand_normal (t_chain);
4814 /* Generate insns to initialize the trampoline. */
4815 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4817 trampolines_created = 1;
4819 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4820 "trampoline generated for nested function %qD", t_func);
4822 return const0_rtx;
4825 static rtx
4826 expand_builtin_adjust_trampoline (tree exp)
4828 rtx tramp;
4830 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4831 return NULL_RTX;
4833 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4834 tramp = round_trampoline_addr (tramp);
4835 if (targetm.calls.trampoline_adjust_address)
4836 tramp = targetm.calls.trampoline_adjust_address (tramp);
4838 return tramp;
4841 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4842 function. The function first checks whether the back end provides
4843 an insn to implement signbit for the respective mode. If not, it
4844 checks whether the floating point format of the value is such that
4845 the sign bit can be extracted. If that is not the case, the
4846 function returns NULL_RTX to indicate that a normal call should be
4847 emitted rather than expanding the function in-line. EXP is the
4848 expression that is a call to the builtin function; if convenient,
4849 the result should be placed in TARGET. */
4850 static rtx
4851 expand_builtin_signbit (tree exp, rtx target)
4853 const struct real_format *fmt;
4854 enum machine_mode fmode, imode, rmode;
4855 tree arg;
4856 int word, bitpos;
4857 enum insn_code icode;
4858 rtx temp;
4859 location_t loc = EXPR_LOCATION (exp);
4861 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4862 return NULL_RTX;
4864 arg = CALL_EXPR_ARG (exp, 0);
4865 fmode = TYPE_MODE (TREE_TYPE (arg));
4866 rmode = TYPE_MODE (TREE_TYPE (exp));
4867 fmt = REAL_MODE_FORMAT (fmode);
4869 arg = builtin_save_expr (arg);
4871 /* Expand the argument yielding a RTX expression. */
4872 temp = expand_normal (arg);
4874 /* Check if the back end provides an insn that handles signbit for the
4875 argument's mode. */
4876 icode = optab_handler (signbit_optab, fmode);
4877 if (icode != CODE_FOR_nothing)
4879 rtx last = get_last_insn ();
4880 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4881 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4882 return target;
4883 delete_insns_since (last);
4886 /* For floating point formats without a sign bit, implement signbit
4887 as "ARG < 0.0". */
4888 bitpos = fmt->signbit_ro;
4889 if (bitpos < 0)
4891 /* But we can't do this if the format supports signed zero. */
4892 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4893 return NULL_RTX;
4895 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4896 build_real (TREE_TYPE (arg), dconst0));
4897 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4900 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4902 imode = int_mode_for_mode (fmode);
4903 if (imode == BLKmode)
4904 return NULL_RTX;
4905 temp = gen_lowpart (imode, temp);
4907 else
4909 imode = word_mode;
4910 /* Handle targets with different FP word orders. */
4911 if (FLOAT_WORDS_BIG_ENDIAN)
4912 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4913 else
4914 word = bitpos / BITS_PER_WORD;
4915 temp = operand_subword_force (temp, word, fmode);
4916 bitpos = bitpos % BITS_PER_WORD;
4919 /* Force the intermediate word_mode (or narrower) result into a
4920 register. This avoids attempting to create paradoxical SUBREGs
4921 of floating point modes below. */
4922 temp = force_reg (imode, temp);
4924 /* If the bitpos is within the "result mode" lowpart, the operation
4925 can be implemented with a single bitwise AND. Otherwise, we need
4926 a right shift and an AND. */
4928 if (bitpos < GET_MODE_BITSIZE (rmode))
4930 double_int mask = double_int_setbit (double_int_zero, bitpos);
4932 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4933 temp = gen_lowpart (rmode, temp);
4934 temp = expand_binop (rmode, and_optab, temp,
4935 immed_double_int_const (mask, rmode),
4936 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4938 else
4940 /* Perform a logical right shift to place the signbit in the least
4941 significant bit, then truncate the result to the desired mode
4942 and mask just this bit. */
4943 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4944 temp = gen_lowpart (rmode, temp);
4945 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4946 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4949 return temp;
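/* Worked example, illustrative only: for IEEE single precision the sign is
   bit 31, so with a 32-bit result mode the lowpart path reduces to

     bits & 0x80000000

   while a 64-bit double examined through a 32-bit result mode takes the
   shift-and-mask path, roughly (bits >> 63) & 1 on a 64-bit word target.  */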
4952 /* Expand fork or exec calls. TARGET is the desired target of the
4953 call. EXP is the call. FN is the
4954 FUNCTION_DECL of the actual function. IGNORE is nonzero if the
4955 value is to be ignored. */
4957 static rtx
4958 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4960 tree id, decl;
4961 tree call;
4963 /* If we are not profiling, just call the function. */
4964 if (!profile_arc_flag)
4965 return NULL_RTX;
4967 /* Otherwise call the wrapper. This should be equivalent for the rest of
4968 the compiler, so the code does not diverge, and the wrapper may run the
4969 code necessary for keeping the profiling sane. */
4971 switch (DECL_FUNCTION_CODE (fn))
4973 case BUILT_IN_FORK:
4974 id = get_identifier ("__gcov_fork");
4975 break;
4977 case BUILT_IN_EXECL:
4978 id = get_identifier ("__gcov_execl");
4979 break;
4981 case BUILT_IN_EXECV:
4982 id = get_identifier ("__gcov_execv");
4983 break;
4985 case BUILT_IN_EXECLP:
4986 id = get_identifier ("__gcov_execlp");
4987 break;
4989 case BUILT_IN_EXECLE:
4990 id = get_identifier ("__gcov_execle");
4991 break;
4993 case BUILT_IN_EXECVP:
4994 id = get_identifier ("__gcov_execvp");
4995 break;
4997 case BUILT_IN_EXECVE:
4998 id = get_identifier ("__gcov_execve");
4999 break;
5001 default:
5002 gcc_unreachable ();
5005 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5006 FUNCTION_DECL, id, TREE_TYPE (fn));
5007 DECL_EXTERNAL (decl) = 1;
5008 TREE_PUBLIC (decl) = 1;
5009 DECL_ARTIFICIAL (decl) = 1;
5010 TREE_NOTHROW (decl) = 1;
5011 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5012 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5013 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5014 return expand_call (call, target, ignore);
5019 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5020 the pointer in these functions is void*, the tree optimizers may remove
5021 casts. The mode computed in expand_builtin isn't reliable either, due
5022 to __sync_bool_compare_and_swap.
5024 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5025 group of builtins. This gives us log2 of the mode size. */
5027 static inline enum machine_mode
5028 get_builtin_sync_mode (int fcode_diff)
5030 /* The size is not negotiable, so ask not to get BLKmode in return
5031 if the target indicates that a smaller size would be better. */
5032 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
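/* Example, illustrative only: __sync_fetch_and_add_4 is two enum values past
   the _1 variant, so fcode_diff == 2 and the size requested is
   BITS_PER_UNIT << 2 == 32 bits, which is SImode on typical targets.  */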
5035 /* Expand the memory expression LOC and return the appropriate memory operand
5036 for the builtin_sync operations. */
5038 static rtx
5039 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5041 rtx addr, mem;
5043 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5044 addr = convert_memory_address (Pmode, addr);
5046 /* Note that we explicitly do not want any alias information for this
5047 memory, so that we kill all other live memories. Otherwise we don't
5048 satisfy the full barrier semantics of the intrinsic. */
5049 mem = validize_mem (gen_rtx_MEM (mode, addr));
5051 /* The alignment needs to be at least that of the mode. */
5052 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5053 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5054 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5055 MEM_VOLATILE_P (mem) = 1;
5057 return mem;
5060 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5061 EXP is the CALL_EXPR. CODE is the rtx code
5062 that corresponds to the arithmetic or logical operation from the name;
5063 an exception here is that NOT actually means NAND. TARGET is an optional
5064 place for us to store the results; AFTER is true if this is the
5065 fetch_and_xxx form. IGNORE is true if we don't actually care about
5066 the result of the operation at all. */
5068 static rtx
5069 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5070 enum rtx_code code, bool after,
5071 rtx target, bool ignore)
5073 rtx val, mem;
5074 enum machine_mode old_mode;
5075 location_t loc = EXPR_LOCATION (exp);
5077 if (code == NOT && warn_sync_nand)
5079 tree fndecl = get_callee_fndecl (exp);
5080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5082 static bool warned_f_a_n, warned_n_a_f;
5084 switch (fcode)
5086 case BUILT_IN_FETCH_AND_NAND_1:
5087 case BUILT_IN_FETCH_AND_NAND_2:
5088 case BUILT_IN_FETCH_AND_NAND_4:
5089 case BUILT_IN_FETCH_AND_NAND_8:
5090 case BUILT_IN_FETCH_AND_NAND_16:
5092 if (warned_f_a_n)
5093 break;
5095 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5096 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5097 warned_f_a_n = true;
5098 break;
5100 case BUILT_IN_NAND_AND_FETCH_1:
5101 case BUILT_IN_NAND_AND_FETCH_2:
5102 case BUILT_IN_NAND_AND_FETCH_4:
5103 case BUILT_IN_NAND_AND_FETCH_8:
5104 case BUILT_IN_NAND_AND_FETCH_16:
5106 if (warned_n_a_f)
5107 break;
5109 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5110 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5111 warned_n_a_f = true;
5112 break;
5114 default:
5115 gcc_unreachable ();
5119 /* Expand the operands. */
5120 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5122 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5123 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5124 of CONST_INTs, where we know the old_mode only from the call argument. */
5125 old_mode = GET_MODE (val);
5126 if (old_mode == VOIDmode)
5127 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5128 val = convert_modes (mode, old_mode, val, 1);
5130 if (ignore)
5131 return expand_sync_operation (mem, val, code);
5132 else
5133 return expand_sync_fetch_operation (mem, val, code, after, target);
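/* Illustrative note on the warning above, not from the original source: the
   GCC 4.4 change is the parenthesization of the NAND operation, e.g. for
   __sync_fetch_and_nand

     pre-4.4:   *ptr = ~*ptr & val;
     4.4 on:    *ptr = ~(*ptr & val);

   hence the one-time inform () for code that still uses these builtins.  */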
5136 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5137 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5138 true if this is the boolean form. TARGET is a place for us to store the
5139 results; this is NOT optional if IS_BOOL is true. */
5141 static rtx
5142 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5143 bool is_bool, rtx target)
5145 rtx old_val, new_val, mem;
5146 enum machine_mode old_mode;
5148 /* Expand the operands. */
5149 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5152 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5153 mode, EXPAND_NORMAL);
5154 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5155 of CONST_INTs, where we know the old_mode only from the call argument. */
5156 old_mode = GET_MODE (old_val);
5157 if (old_mode == VOIDmode)
5158 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5159 old_val = convert_modes (mode, old_mode, old_val, 1);
5161 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5162 mode, EXPAND_NORMAL);
5163 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5164 of CONST_INTs, where we know the old_mode only from the call argument. */
5165 old_mode = GET_MODE (new_val);
5166 if (old_mode == VOIDmode)
5167 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5168 new_val = convert_modes (mode, old_mode, new_val, 1);
5170 if (is_bool)
5171 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5172 else
5173 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5176 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5177 general form is actually an atomic exchange, and some targets only
5178 support a reduced form with the second argument being a constant 1.
5179 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5180 the results. */
5182 static rtx
5183 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5184 rtx target)
5186 rtx val, mem;
5187 enum machine_mode old_mode;
5189 /* Expand the operands. */
5190 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5191 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5192 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5193 of CONST_INTs, where we know the old_mode only from the call argument. */
5194 old_mode = GET_MODE (val);
5195 if (old_mode == VOIDmode)
5196 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5197 val = convert_modes (mode, old_mode, val, 1);
5199 return expand_sync_lock_test_and_set (mem, val, target);
5202 /* Expand the __sync_synchronize intrinsic. */
5204 static void
5205 expand_builtin_synchronize (void)
5207 gimple x;
5208 VEC (tree, gc) *v_clobbers;
5210 #ifdef HAVE_memory_barrier
5211 if (HAVE_memory_barrier)
5213 emit_insn (gen_memory_barrier ());
5214 return;
5216 #endif
5218 if (synchronize_libfunc != NULL_RTX)
5220 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5221 return;
5224 /* If no explicit memory barrier instruction is available, create an
5225 empty asm stmt with a memory clobber. */
5226 v_clobbers = VEC_alloc (tree, gc, 1);
5227 VEC_quick_push (tree, v_clobbers,
5228 tree_cons (NULL, build_string (6, "memory"), NULL));
5229 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5230 gimple_asm_set_volatile (x, true);
5231 expand_asm_stmt (x);
5234 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5236 static void
5237 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5239 struct expand_operand ops[2];
5240 enum insn_code icode;
5241 rtx mem;
5243 /* Expand the operands. */
5244 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5246 /* If there is an explicit operation in the md file, use it. */
5247 icode = direct_optab_handler (sync_lock_release_optab, mode);
5248 if (icode != CODE_FOR_nothing)
5250 create_fixed_operand (&ops[0], mem);
5251 create_input_operand (&ops[1], const0_rtx, mode);
5252 if (maybe_expand_insn (icode, 2, ops))
5253 return;
5256 /* Otherwise we can implement this operation by emitting a barrier
5257 followed by a store of zero. */
5258 expand_builtin_synchronize ();
5259 emit_move_insn (mem, const0_rtx);
5262 /* Expand an expression EXP that calls a built-in function,
5263 with result going to TARGET if that's convenient
5264 (and in mode MODE if that's convenient).
5265 SUBTARGET may be used as the target for computing one of EXP's operands.
5266 IGNORE is nonzero if the value is to be ignored. */
5268 rtx
5269 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5270 int ignore)
5272 tree fndecl = get_callee_fndecl (exp);
5273 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5274 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5275 int flags;
5277 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5278 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5280 /* When not optimizing, generate calls to library functions for a certain
5281 set of builtins. */
5282 if (!optimize
5283 && !called_as_built_in (fndecl)
5284 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5285 && fcode != BUILT_IN_ALLOCA
5286 && fcode != BUILT_IN_FREE)
5287 return expand_call (exp, target, ignore);
5289 /* The built-in function expanders test for target == const0_rtx
5290 to determine whether the function's result will be ignored. */
5291 if (ignore)
5292 target = const0_rtx;
5294 /* If the result of a pure or const built-in function is ignored, and
5295 none of its arguments are volatile, we can avoid expanding the
5296 built-in call and just evaluate the arguments for side-effects. */
5297 if (target == const0_rtx
5298 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5299 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5301 bool volatilep = false;
5302 tree arg;
5303 call_expr_arg_iterator iter;
5305 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5306 if (TREE_THIS_VOLATILE (arg))
5308 volatilep = true;
5309 break;
5312 if (! volatilep)
5314 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5315 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5316 return const0_rtx;
5320 switch (fcode)
5322 CASE_FLT_FN (BUILT_IN_FABS):
5323 target = expand_builtin_fabs (exp, target, subtarget);
5324 if (target)
5325 return target;
5326 break;
5328 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5329 target = expand_builtin_copysign (exp, target, subtarget);
5330 if (target)
5331 return target;
5332 break;
5334 /* Just do a normal library call if we were unable to fold
5335 the values. */
5336 CASE_FLT_FN (BUILT_IN_CABS):
5337 break;
5339 CASE_FLT_FN (BUILT_IN_EXP):
5340 CASE_FLT_FN (BUILT_IN_EXP10):
5341 CASE_FLT_FN (BUILT_IN_POW10):
5342 CASE_FLT_FN (BUILT_IN_EXP2):
5343 CASE_FLT_FN (BUILT_IN_EXPM1):
5344 CASE_FLT_FN (BUILT_IN_LOGB):
5345 CASE_FLT_FN (BUILT_IN_LOG):
5346 CASE_FLT_FN (BUILT_IN_LOG10):
5347 CASE_FLT_FN (BUILT_IN_LOG2):
5348 CASE_FLT_FN (BUILT_IN_LOG1P):
5349 CASE_FLT_FN (BUILT_IN_TAN):
5350 CASE_FLT_FN (BUILT_IN_ASIN):
5351 CASE_FLT_FN (BUILT_IN_ACOS):
5352 CASE_FLT_FN (BUILT_IN_ATAN):
5353 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5354 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5355 because of possible accuracy problems. */
5356 if (! flag_unsafe_math_optimizations)
5357 break;
5358 CASE_FLT_FN (BUILT_IN_SQRT):
5359 CASE_FLT_FN (BUILT_IN_FLOOR):
5360 CASE_FLT_FN (BUILT_IN_CEIL):
5361 CASE_FLT_FN (BUILT_IN_TRUNC):
5362 CASE_FLT_FN (BUILT_IN_ROUND):
5363 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5364 CASE_FLT_FN (BUILT_IN_RINT):
5365 target = expand_builtin_mathfn (exp, target, subtarget);
5366 if (target)
5367 return target;
5368 break;
5370 CASE_FLT_FN (BUILT_IN_FMA):
5371 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5372 if (target)
5373 return target;
5374 break;
5376 CASE_FLT_FN (BUILT_IN_ILOGB):
5377 if (! flag_unsafe_math_optimizations)
5378 break;
5379 CASE_FLT_FN (BUILT_IN_ISINF):
5380 CASE_FLT_FN (BUILT_IN_FINITE):
5381 case BUILT_IN_ISFINITE:
5382 case BUILT_IN_ISNORMAL:
5383 target = expand_builtin_interclass_mathfn (exp, target);
5384 if (target)
5385 return target;
5386 break;
5388 CASE_FLT_FN (BUILT_IN_LCEIL):
5389 CASE_FLT_FN (BUILT_IN_LLCEIL):
5390 CASE_FLT_FN (BUILT_IN_LFLOOR):
5391 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5392 target = expand_builtin_int_roundingfn (exp, target);
5393 if (target)
5394 return target;
5395 break;
5397 CASE_FLT_FN (BUILT_IN_LRINT):
5398 CASE_FLT_FN (BUILT_IN_LLRINT):
5399 CASE_FLT_FN (BUILT_IN_LROUND):
5400 CASE_FLT_FN (BUILT_IN_LLROUND):
5401 target = expand_builtin_int_roundingfn_2 (exp, target);
5402 if (target)
5403 return target;
5404 break;
5406 CASE_FLT_FN (BUILT_IN_POWI):
5407 target = expand_builtin_powi (exp, target);
5408 if (target)
5409 return target;
5410 break;
5412 CASE_FLT_FN (BUILT_IN_ATAN2):
5413 CASE_FLT_FN (BUILT_IN_LDEXP):
5414 CASE_FLT_FN (BUILT_IN_SCALB):
5415 CASE_FLT_FN (BUILT_IN_SCALBN):
5416 CASE_FLT_FN (BUILT_IN_SCALBLN):
5417 if (! flag_unsafe_math_optimizations)
5418 break;
5420 CASE_FLT_FN (BUILT_IN_FMOD):
5421 CASE_FLT_FN (BUILT_IN_REMAINDER):
5422 CASE_FLT_FN (BUILT_IN_DREM):
5423 CASE_FLT_FN (BUILT_IN_POW):
5424 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5425 if (target)
5426 return target;
5427 break;
5429 CASE_FLT_FN (BUILT_IN_CEXPI):
5430 target = expand_builtin_cexpi (exp, target);
5431 gcc_assert (target);
5432 return target;
5434 CASE_FLT_FN (BUILT_IN_SIN):
5435 CASE_FLT_FN (BUILT_IN_COS):
5436 if (! flag_unsafe_math_optimizations)
5437 break;
5438 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5439 if (target)
5440 return target;
5441 break;
5443 CASE_FLT_FN (BUILT_IN_SINCOS):
5444 if (! flag_unsafe_math_optimizations)
5445 break;
5446 target = expand_builtin_sincos (exp);
5447 if (target)
5448 return target;
5449 break;
5451 case BUILT_IN_APPLY_ARGS:
5452 return expand_builtin_apply_args ();
5454 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5455 FUNCTION with a copy of the parameters described by
5456 ARGUMENTS, and ARGSIZE. It returns a block of memory
5457 allocated on the stack into which are stored all the registers
5458 that might possibly be used for returning the result of a
5459 function. ARGUMENTS is the value returned by
5460 __builtin_apply_args. ARGSIZE is the number of bytes of
5461 arguments that must be copied. ??? How should this value be
5462 computed? We'll also need a safe worst case value for varargs
5463 functions. */
5464 case BUILT_IN_APPLY:
5465 if (!validate_arglist (exp, POINTER_TYPE,
5466 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5467 && !validate_arglist (exp, REFERENCE_TYPE,
5468 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5469 return const0_rtx;
5470 else
5472 rtx ops[3];
5474 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5475 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5476 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5478 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5481 /* __builtin_return (RESULT) causes the function to return the
5482 value described by RESULT. RESULT is address of the block of
5483 memory returned by __builtin_apply. */
5484 case BUILT_IN_RETURN:
5485 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5486 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5487 return const0_rtx;
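/* For illustration only: the usual way __builtin_apply_args, __builtin_apply
   and __builtin_return are combined to forward a call unchanged.  This is a
   hypothetical wrapper; TARGET_FUNCTION and the argument-size constant are
   placeholders chosen by the user:

       void target_function ();

       void wrapper ()
       {
         void *args = __builtin_apply_args ();
         void *result = __builtin_apply ((void (*) ()) target_function,
                                         args, 64);
         __builtin_return (result);
       }
*/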
5489 case BUILT_IN_SAVEREGS:
5490 return expand_builtin_saveregs ();
5492 case BUILT_IN_VA_ARG_PACK:
5493 /* All valid uses of __builtin_va_arg_pack () are removed during
5494 inlining. */
5495 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5496 return const0_rtx;
5498 case BUILT_IN_VA_ARG_PACK_LEN:
5499 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5500 inlining. */
5501 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5502 return const0_rtx;
5504 /* Return the address of the first anonymous stack arg. */
5505 case BUILT_IN_NEXT_ARG:
5506 if (fold_builtin_next_arg (exp, false))
5507 return const0_rtx;
5508 return expand_builtin_next_arg ();
5510 case BUILT_IN_CLEAR_CACHE:
5511 target = expand_builtin___clear_cache (exp);
5512 if (target)
5513 return target;
5514 break;
5516 case BUILT_IN_CLASSIFY_TYPE:
5517 return expand_builtin_classify_type (exp);
5519 case BUILT_IN_CONSTANT_P:
5520 return const0_rtx;
5522 case BUILT_IN_FRAME_ADDRESS:
5523 case BUILT_IN_RETURN_ADDRESS:
5524 return expand_builtin_frame_address (fndecl, exp);
5526 /* Returns the address of the area where the structure value is returned,
5527 or 0 otherwise. */
5528 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5529 if (call_expr_nargs (exp) != 0
5530 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5531 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5532 return const0_rtx;
5533 else
5534 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5536 case BUILT_IN_ALLOCA:
5537 /* If the allocation stems from the declaration of a variable-sized
5538 object, it cannot accumulate. */
5539 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5540 if (target)
5541 return target;
5542 break;
5544 case BUILT_IN_STACK_SAVE:
5545 return expand_stack_save ();
5547 case BUILT_IN_STACK_RESTORE:
5548 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5549 return const0_rtx;
5551 case BUILT_IN_BSWAP32:
5552 case BUILT_IN_BSWAP64:
5553 target = expand_builtin_bswap (exp, target, subtarget);
5555 if (target)
5556 return target;
5557 break;
5559 CASE_INT_FN (BUILT_IN_FFS):
5560 case BUILT_IN_FFSIMAX:
5561 target = expand_builtin_unop (target_mode, exp, target,
5562 subtarget, ffs_optab);
5563 if (target)
5564 return target;
5565 break;
5567 CASE_INT_FN (BUILT_IN_CLZ):
5568 case BUILT_IN_CLZIMAX:
5569 target = expand_builtin_unop (target_mode, exp, target,
5570 subtarget, clz_optab);
5571 if (target)
5572 return target;
5573 break;
5575 CASE_INT_FN (BUILT_IN_CTZ):
5576 case BUILT_IN_CTZIMAX:
5577 target = expand_builtin_unop (target_mode, exp, target,
5578 subtarget, ctz_optab);
5579 if (target)
5580 return target;
5581 break;
5583 CASE_INT_FN (BUILT_IN_POPCOUNT):
5584 case BUILT_IN_POPCOUNTIMAX:
5585 target = expand_builtin_unop (target_mode, exp, target,
5586 subtarget, popcount_optab);
5587 if (target)
5588 return target;
5589 break;
5591 CASE_INT_FN (BUILT_IN_PARITY):
5592 case BUILT_IN_PARITYIMAX:
5593 target = expand_builtin_unop (target_mode, exp, target,
5594 subtarget, parity_optab);
5595 if (target)
5596 return target;
5597 break;
5599 case BUILT_IN_STRLEN:
5600 target = expand_builtin_strlen (exp, target, target_mode);
5601 if (target)
5602 return target;
5603 break;
5605 case BUILT_IN_STRCPY:
5606 target = expand_builtin_strcpy (exp, target);
5607 if (target)
5608 return target;
5609 break;
5611 case BUILT_IN_STRNCPY:
5612 target = expand_builtin_strncpy (exp, target);
5613 if (target)
5614 return target;
5615 break;
5617 case BUILT_IN_STPCPY:
5618 target = expand_builtin_stpcpy (exp, target, mode);
5619 if (target)
5620 return target;
5621 break;
5623 case BUILT_IN_MEMCPY:
5624 target = expand_builtin_memcpy (exp, target);
5625 if (target)
5626 return target;
5627 break;
5629 case BUILT_IN_MEMPCPY:
5630 target = expand_builtin_mempcpy (exp, target, mode);
5631 if (target)
5632 return target;
5633 break;
5635 case BUILT_IN_MEMSET:
5636 target = expand_builtin_memset (exp, target, mode);
5637 if (target)
5638 return target;
5639 break;
5641 case BUILT_IN_BZERO:
5642 target = expand_builtin_bzero (exp);
5643 if (target)
5644 return target;
5645 break;
5647 case BUILT_IN_STRCMP:
5648 target = expand_builtin_strcmp (exp, target);
5649 if (target)
5650 return target;
5651 break;
5653 case BUILT_IN_STRNCMP:
5654 target = expand_builtin_strncmp (exp, target, mode);
5655 if (target)
5656 return target;
5657 break;
5659 case BUILT_IN_BCMP:
5660 case BUILT_IN_MEMCMP:
5661 target = expand_builtin_memcmp (exp, target, mode);
5662 if (target)
5663 return target;
5664 break;
5666 case BUILT_IN_SETJMP:
5667 /* This should have been lowered to the builtins below. */
5668 gcc_unreachable ();
5670 case BUILT_IN_SETJMP_SETUP:
5671 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5672 and the receiver label. */
5673 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5675 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5676 VOIDmode, EXPAND_NORMAL);
5677 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5678 rtx label_r = label_rtx (label);
5680 /* This is copied from the handling of non-local gotos. */
5681 expand_builtin_setjmp_setup (buf_addr, label_r);
5682 nonlocal_goto_handler_labels
5683 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5684 nonlocal_goto_handler_labels);
5685 /* ??? Do not let expand_label treat us as such since we would
5686 not want to be both on the list of non-local labels and on
5687 the list of forced labels. */
5688 FORCED_LABEL (label) = 0;
5689 return const0_rtx;
5691 break;
5693 case BUILT_IN_SETJMP_DISPATCHER:
5694 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5695 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5697 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5698 rtx label_r = label_rtx (label);
5700 /* Remove the dispatcher label from the list of non-local labels
5701 since the receiver labels have been added to it above. */
5702 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5703 return const0_rtx;
5705 break;
5707 case BUILT_IN_SETJMP_RECEIVER:
5708 /* __builtin_setjmp_receiver is passed the receiver label. */
5709 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5711 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5712 rtx label_r = label_rtx (label);
5714 expand_builtin_setjmp_receiver (label_r);
5715 return const0_rtx;
5717 break;
5719 /* __builtin_longjmp is passed a pointer to an array of five words.
5720 It's similar to the C library longjmp function but works with
5721 __builtin_setjmp above. */
5722 case BUILT_IN_LONGJMP:
5723 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5725 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5726 VOIDmode, EXPAND_NORMAL);
5727 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5729 if (value != const1_rtx)
5731 error ("%<__builtin_longjmp%> second argument must be 1");
5732 return const0_rtx;
5735 expand_builtin_longjmp (buf_addr, value);
5736 return const0_rtx;
5738 break;
5740 case BUILT_IN_NONLOCAL_GOTO:
5741 target = expand_builtin_nonlocal_goto (exp);
5742 if (target)
5743 return target;
5744 break;
5746 /* This updates the setjmp buffer that is its argument with the value
5747 of the current stack pointer. */
5748 case BUILT_IN_UPDATE_SETJMP_BUF:
5749 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5751 rtx buf_addr
5752 = expand_normal (CALL_EXPR_ARG (exp, 0));
5754 expand_builtin_update_setjmp_buf (buf_addr);
5755 return const0_rtx;
5757 break;
5759 case BUILT_IN_TRAP:
5760 expand_builtin_trap ();
5761 return const0_rtx;
5763 case BUILT_IN_UNREACHABLE:
5764 expand_builtin_unreachable ();
5765 return const0_rtx;
5767 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5768 case BUILT_IN_SIGNBITD32:
5769 case BUILT_IN_SIGNBITD64:
5770 case BUILT_IN_SIGNBITD128:
5771 target = expand_builtin_signbit (exp, target);
5772 if (target)
5773 return target;
5774 break;
5776 /* Various hooks for the DWARF 2 __throw routine. */
5777 case BUILT_IN_UNWIND_INIT:
5778 expand_builtin_unwind_init ();
5779 return const0_rtx;
5780 case BUILT_IN_DWARF_CFA:
5781 return virtual_cfa_rtx;
5782 #ifdef DWARF2_UNWIND_INFO
5783 case BUILT_IN_DWARF_SP_COLUMN:
5784 return expand_builtin_dwarf_sp_column ();
5785 case BUILT_IN_INIT_DWARF_REG_SIZES:
5786 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5787 return const0_rtx;
5788 #endif
5789 case BUILT_IN_FROB_RETURN_ADDR:
5790 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5791 case BUILT_IN_EXTRACT_RETURN_ADDR:
5792 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5793 case BUILT_IN_EH_RETURN:
5794 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5795 CALL_EXPR_ARG (exp, 1));
5796 return const0_rtx;
5797 #ifdef EH_RETURN_DATA_REGNO
5798 case BUILT_IN_EH_RETURN_DATA_REGNO:
5799 return expand_builtin_eh_return_data_regno (exp);
5800 #endif
5801 case BUILT_IN_EXTEND_POINTER:
5802 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5803 case BUILT_IN_EH_POINTER:
5804 return expand_builtin_eh_pointer (exp);
5805 case BUILT_IN_EH_FILTER:
5806 return expand_builtin_eh_filter (exp);
5807 case BUILT_IN_EH_COPY_VALUES:
5808 return expand_builtin_eh_copy_values (exp);
5810 case BUILT_IN_VA_START:
5811 return expand_builtin_va_start (exp);
5812 case BUILT_IN_VA_END:
5813 return expand_builtin_va_end (exp);
5814 case BUILT_IN_VA_COPY:
5815 return expand_builtin_va_copy (exp);
5816 case BUILT_IN_EXPECT:
5817 return expand_builtin_expect (exp, target);
5818 case BUILT_IN_PREFETCH:
5819 expand_builtin_prefetch (exp);
5820 return const0_rtx;
5822 case BUILT_IN_INIT_TRAMPOLINE:
5823 return expand_builtin_init_trampoline (exp);
5824 case BUILT_IN_ADJUST_TRAMPOLINE:
5825 return expand_builtin_adjust_trampoline (exp);
5827 case BUILT_IN_FORK:
5828 case BUILT_IN_EXECL:
5829 case BUILT_IN_EXECV:
5830 case BUILT_IN_EXECLP:
5831 case BUILT_IN_EXECLE:
5832 case BUILT_IN_EXECVP:
5833 case BUILT_IN_EXECVE:
5834 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5835 if (target)
5836 return target;
5837 break;
5839 case BUILT_IN_FETCH_AND_ADD_1:
5840 case BUILT_IN_FETCH_AND_ADD_2:
5841 case BUILT_IN_FETCH_AND_ADD_4:
5842 case BUILT_IN_FETCH_AND_ADD_8:
5843 case BUILT_IN_FETCH_AND_ADD_16:
5844 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
5845 target = expand_builtin_sync_operation (mode, exp, PLUS,
5846 false, target, ignore);
5847 if (target)
5848 return target;
5849 break;
5851 case BUILT_IN_FETCH_AND_SUB_1:
5852 case BUILT_IN_FETCH_AND_SUB_2:
5853 case BUILT_IN_FETCH_AND_SUB_4:
5854 case BUILT_IN_FETCH_AND_SUB_8:
5855 case BUILT_IN_FETCH_AND_SUB_16:
5856 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
5857 target = expand_builtin_sync_operation (mode, exp, MINUS,
5858 false, target, ignore);
5859 if (target)
5860 return target;
5861 break;
5863 case BUILT_IN_FETCH_AND_OR_1:
5864 case BUILT_IN_FETCH_AND_OR_2:
5865 case BUILT_IN_FETCH_AND_OR_4:
5866 case BUILT_IN_FETCH_AND_OR_8:
5867 case BUILT_IN_FETCH_AND_OR_16:
5868 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
5869 target = expand_builtin_sync_operation (mode, exp, IOR,
5870 false, target, ignore);
5871 if (target)
5872 return target;
5873 break;
5875 case BUILT_IN_FETCH_AND_AND_1:
5876 case BUILT_IN_FETCH_AND_AND_2:
5877 case BUILT_IN_FETCH_AND_AND_4:
5878 case BUILT_IN_FETCH_AND_AND_8:
5879 case BUILT_IN_FETCH_AND_AND_16:
5880 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
5881 target = expand_builtin_sync_operation (mode, exp, AND,
5882 false, target, ignore);
5883 if (target)
5884 return target;
5885 break;
5887 case BUILT_IN_FETCH_AND_XOR_1:
5888 case BUILT_IN_FETCH_AND_XOR_2:
5889 case BUILT_IN_FETCH_AND_XOR_4:
5890 case BUILT_IN_FETCH_AND_XOR_8:
5891 case BUILT_IN_FETCH_AND_XOR_16:
5892 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
5893 target = expand_builtin_sync_operation (mode, exp, XOR,
5894 false, target, ignore);
5895 if (target)
5896 return target;
5897 break;
5899 case BUILT_IN_FETCH_AND_NAND_1:
5900 case BUILT_IN_FETCH_AND_NAND_2:
5901 case BUILT_IN_FETCH_AND_NAND_4:
5902 case BUILT_IN_FETCH_AND_NAND_8:
5903 case BUILT_IN_FETCH_AND_NAND_16:
5904 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
5905 target = expand_builtin_sync_operation (mode, exp, NOT,
5906 false, target, ignore);
5907 if (target)
5908 return target;
5909 break;
5911 case BUILT_IN_ADD_AND_FETCH_1:
5912 case BUILT_IN_ADD_AND_FETCH_2:
5913 case BUILT_IN_ADD_AND_FETCH_4:
5914 case BUILT_IN_ADD_AND_FETCH_8:
5915 case BUILT_IN_ADD_AND_FETCH_16:
5916 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
5917 target = expand_builtin_sync_operation (mode, exp, PLUS,
5918 true, target, ignore);
5919 if (target)
5920 return target;
5921 break;
5923 case BUILT_IN_SUB_AND_FETCH_1:
5924 case BUILT_IN_SUB_AND_FETCH_2:
5925 case BUILT_IN_SUB_AND_FETCH_4:
5926 case BUILT_IN_SUB_AND_FETCH_8:
5927 case BUILT_IN_SUB_AND_FETCH_16:
5928 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
5929 target = expand_builtin_sync_operation (mode, exp, MINUS,
5930 true, target, ignore);
5931 if (target)
5932 return target;
5933 break;
5935 case BUILT_IN_OR_AND_FETCH_1:
5936 case BUILT_IN_OR_AND_FETCH_2:
5937 case BUILT_IN_OR_AND_FETCH_4:
5938 case BUILT_IN_OR_AND_FETCH_8:
5939 case BUILT_IN_OR_AND_FETCH_16:
5940 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
5941 target = expand_builtin_sync_operation (mode, exp, IOR,
5942 true, target, ignore);
5943 if (target)
5944 return target;
5945 break;
5947 case BUILT_IN_AND_AND_FETCH_1:
5948 case BUILT_IN_AND_AND_FETCH_2:
5949 case BUILT_IN_AND_AND_FETCH_4:
5950 case BUILT_IN_AND_AND_FETCH_8:
5951 case BUILT_IN_AND_AND_FETCH_16:
5952 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
5953 target = expand_builtin_sync_operation (mode, exp, AND,
5954 true, target, ignore);
5955 if (target)
5956 return target;
5957 break;
5959 case BUILT_IN_XOR_AND_FETCH_1:
5960 case BUILT_IN_XOR_AND_FETCH_2:
5961 case BUILT_IN_XOR_AND_FETCH_4:
5962 case BUILT_IN_XOR_AND_FETCH_8:
5963 case BUILT_IN_XOR_AND_FETCH_16:
5964 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
5965 target = expand_builtin_sync_operation (mode, exp, XOR,
5966 true, target, ignore);
5967 if (target)
5968 return target;
5969 break;
5971 case BUILT_IN_NAND_AND_FETCH_1:
5972 case BUILT_IN_NAND_AND_FETCH_2:
5973 case BUILT_IN_NAND_AND_FETCH_4:
5974 case BUILT_IN_NAND_AND_FETCH_8:
5975 case BUILT_IN_NAND_AND_FETCH_16:
5976 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
5977 target = expand_builtin_sync_operation (mode, exp, NOT,
5978 true, target, ignore);
5979 if (target)
5980 return target;
5981 break;
5983 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
5984 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
5985 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
5986 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
5987 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
5988 if (mode == VOIDmode)
5989 mode = TYPE_MODE (boolean_type_node);
5990 if (!target || !register_operand (target, mode))
5991 target = gen_reg_rtx (mode);
5993 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
5994 target = expand_builtin_compare_and_swap (mode, exp, true, target);
5995 if (target)
5996 return target;
5997 break;
5999 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6000 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6001 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6002 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6003 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6004 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6005 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6006 if (target)
6007 return target;
6008 break;
6010 case BUILT_IN_LOCK_TEST_AND_SET_1:
6011 case BUILT_IN_LOCK_TEST_AND_SET_2:
6012 case BUILT_IN_LOCK_TEST_AND_SET_4:
6013 case BUILT_IN_LOCK_TEST_AND_SET_8:
6014 case BUILT_IN_LOCK_TEST_AND_SET_16:
6015 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6016 target = expand_builtin_lock_test_and_set (mode, exp, target);
6017 if (target)
6018 return target;
6019 break;
6021 case BUILT_IN_LOCK_RELEASE_1:
6022 case BUILT_IN_LOCK_RELEASE_2:
6023 case BUILT_IN_LOCK_RELEASE_4:
6024 case BUILT_IN_LOCK_RELEASE_8:
6025 case BUILT_IN_LOCK_RELEASE_16:
6026 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6027 expand_builtin_lock_release (mode, exp);
6028 return const0_rtx;
6030 case BUILT_IN_SYNCHRONIZE:
6031 expand_builtin_synchronize ();
6032 return const0_rtx;
6034 case BUILT_IN_OBJECT_SIZE:
6035 return expand_builtin_object_size (exp);
6037 case BUILT_IN_MEMCPY_CHK:
6038 case BUILT_IN_MEMPCPY_CHK:
6039 case BUILT_IN_MEMMOVE_CHK:
6040 case BUILT_IN_MEMSET_CHK:
6041 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6042 if (target)
6043 return target;
6044 break;
6046 case BUILT_IN_STRCPY_CHK:
6047 case BUILT_IN_STPCPY_CHK:
6048 case BUILT_IN_STRNCPY_CHK:
6049 case BUILT_IN_STRCAT_CHK:
6050 case BUILT_IN_STRNCAT_CHK:
6051 case BUILT_IN_SNPRINTF_CHK:
6052 case BUILT_IN_VSNPRINTF_CHK:
6053 maybe_emit_chk_warning (exp, fcode);
6054 break;
6056 case BUILT_IN_SPRINTF_CHK:
6057 case BUILT_IN_VSPRINTF_CHK:
6058 maybe_emit_sprintf_chk_warning (exp, fcode);
6059 break;
6061 case BUILT_IN_FREE:
6062 maybe_emit_free_warning (exp);
6063 break;
6065 default: /* just do library call, if unknown builtin */
6066 break;
6069 /* The switch statement above can drop through to cause the function
6070 to be called normally. */
6071 return expand_call (exp, target, ignore);
6074 /* Determine whether a tree node represents a call to a built-in
6075 function. If the tree T is a call to a built-in function with
6076 the right number of arguments of the appropriate types, return
6077 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6078 Otherwise the return value is END_BUILTINS. */
6080 enum built_in_function
6081 builtin_mathfn_code (const_tree t)
6083 const_tree fndecl, arg, parmlist;
6084 const_tree argtype, parmtype;
6085 const_call_expr_arg_iterator iter;
6087 if (TREE_CODE (t) != CALL_EXPR
6088 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6089 return END_BUILTINS;
6091 fndecl = get_callee_fndecl (t);
6092 if (fndecl == NULL_TREE
6093 || TREE_CODE (fndecl) != FUNCTION_DECL
6094 || ! DECL_BUILT_IN (fndecl)
6095 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6096 return END_BUILTINS;
6098 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6099 init_const_call_expr_arg_iterator (t, &iter);
6100 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6102 /* If a function doesn't take a variable number of arguments,
6103 the last element in the list will have type `void'. */
6104 parmtype = TREE_VALUE (parmlist);
6105 if (VOID_TYPE_P (parmtype))
6107 if (more_const_call_expr_args_p (&iter))
6108 return END_BUILTINS;
6109 return DECL_FUNCTION_CODE (fndecl);
6112 if (! more_const_call_expr_args_p (&iter))
6113 return END_BUILTINS;
6115 arg = next_const_call_expr_arg (&iter);
6116 argtype = TREE_TYPE (arg);
6118 if (SCALAR_FLOAT_TYPE_P (parmtype))
6120 if (! SCALAR_FLOAT_TYPE_P (argtype))
6121 return END_BUILTINS;
6123 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6125 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6126 return END_BUILTINS;
6128 else if (POINTER_TYPE_P (parmtype))
6130 if (! POINTER_TYPE_P (argtype))
6131 return END_BUILTINS;
6133 else if (INTEGRAL_TYPE_P (parmtype))
6135 if (! INTEGRAL_TYPE_P (argtype))
6136 return END_BUILTINS;
6138 else
6139 return END_BUILTINS;
6142 /* Variable-length argument list. */
6143 return DECL_FUNCTION_CODE (fndecl);
6146 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6147 evaluate to a constant. */
6149 static tree
6150 fold_builtin_constant_p (tree arg)
6152 /* We return 1 for a numeric type that's known to be a constant
6153 value at compile-time or for an aggregate type that's a
6154 literal constant. */
6155 STRIP_NOPS (arg);
6157 /* If we know this is a constant, return the constant one. */
6158 if (CONSTANT_CLASS_P (arg)
6159 || (TREE_CODE (arg) == CONSTRUCTOR
6160 && TREE_CONSTANT (arg)))
6161 return integer_one_node;
6162 if (TREE_CODE (arg) == ADDR_EXPR)
6164 tree op = TREE_OPERAND (arg, 0);
6165 if (TREE_CODE (op) == STRING_CST
6166 || (TREE_CODE (op) == ARRAY_REF
6167 && integer_zerop (TREE_OPERAND (op, 1))
6168 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6169 return integer_one_node;
6172 /* If this expression has side effects, show we don't know it to be a
6173 constant. Likewise if it's a pointer or aggregate type since in
6174 those cases we only want literals, as those are only optimized
6175 when generating RTL, not later.
6176 And finally, if we are compiling an initializer, not code, we
6177 need to return a definite result now; there's not going to be any
6178 more optimization done. */
6179 if (TREE_SIDE_EFFECTS (arg)
6180 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6181 || POINTER_TYPE_P (TREE_TYPE (arg))
6182 || cfun == 0
6183 || folding_initializer)
6184 return integer_zero_node;
6186 return NULL_TREE;
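/* For illustration only, outcomes of the folding above on hypothetical
   arguments:

       __builtin_constant_p (3 * 7 + 1)   folds to 1 (folded to a constant)
       __builtin_constant_p ("abc")       folds to 1 (address of a string)
       __builtin_constant_p (x++)         folds to 0 (side effects)

   Calls that cannot be decided here are left alone; if they survive until
   RTL expansion, the BUILT_IN_CONSTANT_P case in expand_builtin above
   resolves them to 0.  */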
6189 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6190 return it as a truthvalue. */
6192 static tree
6193 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6195 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6197 fn = built_in_decls[BUILT_IN_EXPECT];
6198 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6199 ret_type = TREE_TYPE (TREE_TYPE (fn));
6200 pred_type = TREE_VALUE (arg_types);
6201 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6203 pred = fold_convert_loc (loc, pred_type, pred);
6204 expected = fold_convert_loc (loc, expected_type, expected);
6205 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6207 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6208 build_int_cst (ret_type, 0));
6211 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6212 NULL_TREE if no simplification is possible. */
6214 static tree
6215 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6217 tree inner, fndecl;
6218 enum tree_code code;
6220 /* If this is a builtin_expect within a builtin_expect, keep the
6221 inner one. See through a comparison against a constant; it
6222 might have been added to create a truthvalue. */
6223 inner = arg0;
6224 if (COMPARISON_CLASS_P (inner)
6225 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6226 inner = TREE_OPERAND (inner, 0);
6228 if (TREE_CODE (inner) == CALL_EXPR
6229 && (fndecl = get_callee_fndecl (inner))
6230 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6231 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6232 return arg0;
6234 /* Distribute the expected value over short-circuiting operators.
6235 See through the cast from truthvalue_type_node to long. */
6236 inner = arg0;
6237 while (TREE_CODE (inner) == NOP_EXPR
6238 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6239 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6240 inner = TREE_OPERAND (inner, 0);
6242 code = TREE_CODE (inner);
6243 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6245 tree op0 = TREE_OPERAND (inner, 0);
6246 tree op1 = TREE_OPERAND (inner, 1);
6248 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6249 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6250 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6252 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6255 /* If the argument isn't invariant then there's nothing else we can do. */
6256 if (!TREE_CONSTANT (arg0))
6257 return NULL_TREE;
6259 /* If we expect that a comparison against the argument will fold to
6260 a constant, return the constant. In practice, this means a true
6261 constant or the address of a non-weak symbol. */
6262 inner = arg0;
6263 STRIP_NOPS (inner);
6264 if (TREE_CODE (inner) == ADDR_EXPR)
6268 inner = TREE_OPERAND (inner, 0);
6270 while (TREE_CODE (inner) == COMPONENT_REF
6271 || TREE_CODE (inner) == ARRAY_REF);
6272 if ((TREE_CODE (inner) == VAR_DECL
6273 || TREE_CODE (inner) == FUNCTION_DECL)
6274 && DECL_WEAK (inner))
6275 return NULL_TREE;
6278 /* Otherwise, ARG0 already has the proper type for the return value. */
6279 return arg0;
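/* For illustration only: the distribution over short-circuit operators
   performed above, shown on a hypothetical source expression.  Roughly,

       __builtin_expect (a && b, 1)

   is rewritten as

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that the prediction reaches both operands of the short-circuit.  */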
6282 /* Fold a call to __builtin_classify_type with argument ARG. */
6284 static tree
6285 fold_builtin_classify_type (tree arg)
6287 if (arg == 0)
6288 return build_int_cst (integer_type_node, no_type_class);
6290 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6293 /* Fold a call to __builtin_strlen with argument ARG. */
6295 static tree
6296 fold_builtin_strlen (location_t loc, tree type, tree arg)
6298 if (!validate_arg (arg, POINTER_TYPE))
6299 return NULL_TREE;
6300 else
6302 tree len = c_strlen (arg, 0);
6304 if (len)
6305 return fold_convert_loc (loc, type, len);
6307 return NULL_TREE;
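/* For illustration only: c_strlen lets the folder above reduce a constant
   string argument at compile time, e.g. a hypothetical strlen ("hello")
   folds to the constant 5.  */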
6311 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6313 static tree
6314 fold_builtin_inf (location_t loc, tree type, int warn)
6316 REAL_VALUE_TYPE real;
6318 /* __builtin_inff is intended to be usable to define INFINITY on all
6319 targets. If an infinity is not available, INFINITY expands "to a
6320 positive constant of type float that overflows at translation
6321 time", footnote "In this case, using INFINITY will violate the
6322 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6323 Thus we pedwarn to ensure this constraint violation is
6324 diagnosed. */
6325 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6326 pedwarn (loc, 0, "target format does not support infinity");
6328 real_inf (&real);
6329 return build_real (type, real);
6332 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6334 static tree
6335 fold_builtin_nan (tree arg, tree type, int quiet)
6337 REAL_VALUE_TYPE real;
6338 const char *str;
6340 if (!validate_arg (arg, POINTER_TYPE))
6341 return NULL_TREE;
6342 str = c_getstr (arg);
6343 if (!str)
6344 return NULL_TREE;
6346 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6347 return NULL_TREE;
6349 return build_real (type, real);
6352 /* Return true if the floating point expression T has an integer value.
6353 We also allow +Inf, -Inf and NaN to be considered integer values. */
6355 static bool
6356 integer_valued_real_p (tree t)
6358 switch (TREE_CODE (t))
6360 case FLOAT_EXPR:
6361 return true;
6363 case ABS_EXPR:
6364 case SAVE_EXPR:
6365 return integer_valued_real_p (TREE_OPERAND (t, 0));
6367 case COMPOUND_EXPR:
6368 case MODIFY_EXPR:
6369 case BIND_EXPR:
6370 return integer_valued_real_p (TREE_OPERAND (t, 1));
6372 case PLUS_EXPR:
6373 case MINUS_EXPR:
6374 case MULT_EXPR:
6375 case MIN_EXPR:
6376 case MAX_EXPR:
6377 return integer_valued_real_p (TREE_OPERAND (t, 0))
6378 && integer_valued_real_p (TREE_OPERAND (t, 1));
6380 case COND_EXPR:
6381 return integer_valued_real_p (TREE_OPERAND (t, 1))
6382 && integer_valued_real_p (TREE_OPERAND (t, 2));
6384 case REAL_CST:
6385 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6387 case NOP_EXPR:
6389 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6390 if (TREE_CODE (type) == INTEGER_TYPE)
6391 return true;
6392 if (TREE_CODE (type) == REAL_TYPE)
6393 return integer_valued_real_p (TREE_OPERAND (t, 0));
6394 break;
6397 case CALL_EXPR:
6398 switch (builtin_mathfn_code (t))
6400 CASE_FLT_FN (BUILT_IN_CEIL):
6401 CASE_FLT_FN (BUILT_IN_FLOOR):
6402 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6403 CASE_FLT_FN (BUILT_IN_RINT):
6404 CASE_FLT_FN (BUILT_IN_ROUND):
6405 CASE_FLT_FN (BUILT_IN_TRUNC):
6406 return true;
6408 CASE_FLT_FN (BUILT_IN_FMIN):
6409 CASE_FLT_FN (BUILT_IN_FMAX):
6410 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6411 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6413 default:
6414 break;
6416 break;
6418 default:
6419 break;
6421 return false;
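/* For illustration only, expressions the predicate above recognizes as
   integer valued (hypothetical trees written in source syntax):

       (double) i                   conversion from an integer type
       floor (x), trunc (x)         results of the rounding builtins
       fmin (ceil (a), 2.0)         fmin/fmax of integer-valued operands

   whereas a REAL_CST such as 2.5 is rejected by real_isinteger.  */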
6424 /* FNDECL is assumed to be a builtin where truncation can be propagated
6425 across (for instance floor((double)f) == (double)floorf (f)).
6426 Do the transformation for a call with argument ARG. */
6428 static tree
6429 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6431 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6433 if (!validate_arg (arg, REAL_TYPE))
6434 return NULL_TREE;
6436 /* Integer rounding functions are idempotent. */
6437 if (fcode == builtin_mathfn_code (arg))
6438 return arg;
6440 /* If the argument is already integer valued, and we don't need to worry
6441 about setting errno, there's no need to perform rounding. */
6442 if (! flag_errno_math && integer_valued_real_p (arg))
6443 return arg;
6445 if (optimize)
6447 tree arg0 = strip_float_extensions (arg);
6448 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6449 tree newtype = TREE_TYPE (arg0);
6450 tree decl;
6452 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6453 && (decl = mathfn_built_in (newtype, fcode)))
6454 return fold_convert_loc (loc, ftype,
6455 build_call_expr_loc (loc, decl, 1,
6456 fold_convert_loc (loc,
6457 newtype,
6458 arg0)));
6460 return NULL_TREE;
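/* For illustration only, the effect of the folder above on hypothetical
   calls, assuming the narrower floorf is available via mathfn_built_in:

       floor ((double) f)   becomes   (double) floorf (f)    for float f
       floor (floor (x))    becomes   floor (x)              (idempotence)

   and with -fno-math-errno an argument already known to be integer valued
   is returned unchanged.  */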
6463 /* FNDECL is assumed to be a builtin which can narrow the FP type of
6464 the argument, for instance lround((double)f) -> lroundf (f).
6465 Do the transformation for a call with argument ARG. */
6467 static tree
6468 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6470 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6472 if (!validate_arg (arg, REAL_TYPE))
6473 return NULL_TREE;
6475 /* If the argument is already integer valued, and we don't need to worry
6476 about setting errno, there's no need to perform rounding. */
6477 if (! flag_errno_math && integer_valued_real_p (arg))
6478 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6479 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6481 if (optimize)
6483 tree ftype = TREE_TYPE (arg);
6484 tree arg0 = strip_float_extensions (arg);
6485 tree newtype = TREE_TYPE (arg0);
6486 tree decl;
6488 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6489 && (decl = mathfn_built_in (newtype, fcode)))
6490 return build_call_expr_loc (loc, decl, 1,
6491 fold_convert_loc (loc, newtype, arg0));
6494 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6495 sizeof (long long) == sizeof (long). */
6496 if (TYPE_PRECISION (long_long_integer_type_node)
6497 == TYPE_PRECISION (long_integer_type_node))
6499 tree newfn = NULL_TREE;
6500 switch (fcode)
6502 CASE_FLT_FN (BUILT_IN_LLCEIL):
6503 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6504 break;
6506 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6507 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6508 break;
6510 CASE_FLT_FN (BUILT_IN_LLROUND):
6511 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6512 break;
6514 CASE_FLT_FN (BUILT_IN_LLRINT):
6515 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6516 break;
6518 default:
6519 break;
6522 if (newfn)
6524 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6525 return fold_convert_loc (loc,
6526 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6530 return NULL_TREE;
6533 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6534 return type. Return NULL_TREE if no simplification can be made. */
6536 static tree
6537 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6539 tree res;
6541 if (!validate_arg (arg, COMPLEX_TYPE)
6542 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6543 return NULL_TREE;
6545 /* Calculate the result when the argument is a constant. */
6546 if (TREE_CODE (arg) == COMPLEX_CST
6547 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6548 type, mpfr_hypot)))
6549 return res;
6551 if (TREE_CODE (arg) == COMPLEX_EXPR)
6553 tree real = TREE_OPERAND (arg, 0);
6554 tree imag = TREE_OPERAND (arg, 1);
6556 /* If either part is zero, cabs is fabs of the other. */
6557 if (real_zerop (real))
6558 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6559 if (real_zerop (imag))
6560 return fold_build1_loc (loc, ABS_EXPR, type, real);
6562 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6563 if (flag_unsafe_math_optimizations
6564 && operand_equal_p (real, imag, OEP_PURE_SAME))
6566 const REAL_VALUE_TYPE sqrt2_trunc
6567 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6568 STRIP_NOPS (real);
6569 return fold_build2_loc (loc, MULT_EXPR, type,
6570 fold_build1_loc (loc, ABS_EXPR, type, real),
6571 build_real (type, sqrt2_trunc));
6575 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6576 if (TREE_CODE (arg) == NEGATE_EXPR
6577 || TREE_CODE (arg) == CONJ_EXPR)
6578 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6580 /* Don't do this when optimizing for size. */
6581 if (flag_unsafe_math_optimizations
6582 && optimize && optimize_function_for_speed_p (cfun))
6584 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6586 if (sqrtfn != NULL_TREE)
6588 tree rpart, ipart, result;
6590 arg = builtin_save_expr (arg);
6592 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6593 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6595 rpart = builtin_save_expr (rpart);
6596 ipart = builtin_save_expr (ipart);
6598 result = fold_build2_loc (loc, PLUS_EXPR, type,
6599 fold_build2_loc (loc, MULT_EXPR, type,
6600 rpart, rpart),
6601 fold_build2_loc (loc, MULT_EXPR, type,
6602 ipart, ipart));
6604 return build_call_expr_loc (loc, sqrtfn, 1, result);
6608 return NULL_TREE;
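/* For illustration only, the cabs simplifications above on hypothetical
   operands (the notation follows the comments in the code):

       cabs (x + 0.0i)             ->  fabs (x)
       cabs (x + xi)               ->  fabs (x) * sqrt (2)   (unsafe math only)
       cabs (-z), cabs (conj (z))  ->  cabs (z)
       cabs (z)                    ->  sqrt (r*r + i*i)      (unsafe math, when
                                                              optimizing for speed)
*/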
6611 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6612 complex tree type of the result. If NEG is true, the imaginary
6613 zero is negative. */
6615 static tree
6616 build_complex_cproj (tree type, bool neg)
6618 REAL_VALUE_TYPE rinf, rzero = dconst0;
6620 real_inf (&rinf);
6621 rzero.sign = neg;
6622 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6623 build_real (TREE_TYPE (type), rzero));
6626 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6627 return type. Return NULL_TREE if no simplification can be made. */
6629 static tree
6630 fold_builtin_cproj (location_t loc, tree arg, tree type)
6632 if (!validate_arg (arg, COMPLEX_TYPE)
6633 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6634 return NULL_TREE;
6636 /* If there are no infinities, return arg. */
6637 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
6638 return non_lvalue_loc (loc, arg);
6640 /* Calculate the result when the argument is a constant. */
6641 if (TREE_CODE (arg) == COMPLEX_CST)
6643 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
6644 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
6646 if (real_isinf (real) || real_isinf (imag))
6647 return build_complex_cproj (type, imag->sign);
6648 else
6649 return arg;
6651 else if (TREE_CODE (arg) == COMPLEX_EXPR)
6653 tree real = TREE_OPERAND (arg, 0);
6654 tree imag = TREE_OPERAND (arg, 1);
6656 STRIP_NOPS (real);
6657 STRIP_NOPS (imag);
6659 /* If the real part is inf and the imag part is known to be
6660 nonnegative, return (inf + 0i). Remember side-effects are
6661 possible in the imag part. */
6662 if (TREE_CODE (real) == REAL_CST
6663 && real_isinf (TREE_REAL_CST_PTR (real))
6664 && tree_expr_nonnegative_p (imag))
6665 return omit_one_operand_loc (loc, type,
6666 build_complex_cproj (type, false),
6667 arg);
6669 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
6670 Remember side-effects are possible in the real part. */
6671 if (TREE_CODE (imag) == REAL_CST
6672 && real_isinf (TREE_REAL_CST_PTR (imag)))
6673 return
6674 omit_one_operand_loc (loc, type,
6675 build_complex_cproj (type, TREE_REAL_CST_PTR
6676 (imag)->sign), arg);
6679 return NULL_TREE;
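/* For illustration only, the folding above on hypothetical constant
   arguments, matching the C99 definition of cproj:

       cproj (1.0 + 2.0i)        ->  1.0 + 2.0i       (finite: unchanged)
       cproj (-INFINITY + 3.0i)  ->  INFINITY + 0.0i  (infinite real part)
       cproj (1.0 - INFINITY*i)  ->  INFINITY - 0.0i  (infinite imag part)
*/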
6682 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6683 Return NULL_TREE if no simplification can be made. */
6685 static tree
6686 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6689 enum built_in_function fcode;
6690 tree res;
6692 if (!validate_arg (arg, REAL_TYPE))
6693 return NULL_TREE;
6695 /* Calculate the result when the argument is a constant. */
6696 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6697 return res;
6699 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6700 fcode = builtin_mathfn_code (arg);
6701 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6703 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6704 arg = fold_build2_loc (loc, MULT_EXPR, type,
6705 CALL_EXPR_ARG (arg, 0),
6706 build_real (type, dconsthalf));
6707 return build_call_expr_loc (loc, expfn, 1, arg);
6710 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6711 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6713 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6715 if (powfn)
6717 tree arg0 = CALL_EXPR_ARG (arg, 0);
6718 tree tree_root;
6719 /* The inner root was either sqrt or cbrt. */
6720 /* This was a conditional expression but it triggered a bug
6721 in Sun C 5.5. */
6722 REAL_VALUE_TYPE dconstroot;
6723 if (BUILTIN_SQRT_P (fcode))
6724 dconstroot = dconsthalf;
6725 else
6726 dconstroot = dconst_third ();
6728 /* Adjust for the outer root. */
6729 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6730 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6731 tree_root = build_real (type, dconstroot);
6732 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6736 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6737 if (flag_unsafe_math_optimizations
6738 && (fcode == BUILT_IN_POW
6739 || fcode == BUILT_IN_POWF
6740 || fcode == BUILT_IN_POWL))
6742 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6743 tree arg0 = CALL_EXPR_ARG (arg, 0);
6744 tree arg1 = CALL_EXPR_ARG (arg, 1);
6745 tree narg1;
6746 if (!tree_expr_nonnegative_p (arg0))
6747 arg0 = build1 (ABS_EXPR, type, arg0);
6748 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
6749 build_real (type, dconsthalf));
6750 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
6753 return NULL_TREE;
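/* For illustration only, the unsafe-math rewrites above on hypothetical
   calls (all require flag_unsafe_math_optimizations):

       sqrt (exp (x))     ->  exp (x * 0.5)
       sqrt (cbrt (x))    ->  pow (x, 1.0/6)
       sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)
*/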
6756 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6757 Return NULL_TREE if no simplification can be made. */
6759 static tree
6760 fold_builtin_cbrt (location_t loc, tree arg, tree type)
6762 const enum built_in_function fcode = builtin_mathfn_code (arg);
6763 tree res;
6765 if (!validate_arg (arg, REAL_TYPE))
6766 return NULL_TREE;
6768 /* Calculate the result when the argument is a constant. */
6769 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
6770 return res;
6772 if (flag_unsafe_math_optimizations)
6774 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6775 if (BUILTIN_EXPONENT_P (fcode))
6777 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6778 const REAL_VALUE_TYPE third_trunc =
6779 real_value_truncate (TYPE_MODE (type), dconst_third ());
6780 arg = fold_build2_loc (loc, MULT_EXPR, type,
6781 CALL_EXPR_ARG (arg, 0),
6782 build_real (type, third_trunc));
6783 return build_call_expr_loc (loc, expfn, 1, arg);
6786 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6787 if (BUILTIN_SQRT_P (fcode))
6789 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6791 if (powfn)
6793 tree arg0 = CALL_EXPR_ARG (arg, 0);
6794 tree tree_root;
6795 REAL_VALUE_TYPE dconstroot = dconst_third ();
6797 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6798 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6799 tree_root = build_real (type, dconstroot);
6800 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6804 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6805 if (BUILTIN_CBRT_P (fcode))
6807 tree arg0 = CALL_EXPR_ARG (arg, 0);
6808 if (tree_expr_nonnegative_p (arg0))
6810 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6812 if (powfn)
6814 tree tree_root;
6815 REAL_VALUE_TYPE dconstroot;
6817 real_arithmetic (&dconstroot, MULT_EXPR,
6818 dconst_third_ptr (), dconst_third_ptr ());
6819 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6820 tree_root = build_real (type, dconstroot);
6821 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6826 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6827 if (fcode == BUILT_IN_POW
6828 || fcode == BUILT_IN_POWF
6829 || fcode == BUILT_IN_POWL)
6831 tree arg00 = CALL_EXPR_ARG (arg, 0);
6832 tree arg01 = CALL_EXPR_ARG (arg, 1);
6833 if (tree_expr_nonnegative_p (arg00))
6835 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6836 const REAL_VALUE_TYPE dconstroot
6837 = real_value_truncate (TYPE_MODE (type), dconst_third ());
6838 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
6839 build_real (type, dconstroot));
6840 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
6844 return NULL_TREE;
6847 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6848 TYPE is the type of the return value. Return NULL_TREE if no
6849 simplification can be made. */
6851 static tree
6852 fold_builtin_cos (location_t loc,
6853 tree arg, tree type, tree fndecl)
6855 tree res, narg;
6857 if (!validate_arg (arg, REAL_TYPE))
6858 return NULL_TREE;
6860 /* Calculate the result when the argument is a constant. */
6861 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6862 return res;
6864 /* Optimize cos(-x) into cos (x). */
6865 if ((narg = fold_strip_sign_ops (arg)))
6866 return build_call_expr_loc (loc, fndecl, 1, narg);
6868 return NULL_TREE;
6871 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6872 Return NULL_TREE if no simplification can be made. */
6874 static tree
6875 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6877 if (validate_arg (arg, REAL_TYPE))
6879 tree res, narg;
6881 /* Calculate the result when the argument is a constant. */
6882 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6883 return res;
6885 /* Optimize cosh(-x) into cosh (x). */
6886 if ((narg = fold_strip_sign_ops (arg)))
6887 return build_call_expr_loc (loc, fndecl, 1, narg);
6890 return NULL_TREE;
6893 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6894 argument ARG. TYPE is the type of the return value. Return
6895 NULL_TREE if no simplification can be made. */
6897 static tree
6898 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6899 bool hyper)
6901 if (validate_arg (arg, COMPLEX_TYPE)
6902 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6904 tree tmp;
6906 /* Calculate the result when the argument is a constant. */
6907 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6908 return tmp;
6910 /* Optimize fn(-x) into fn(x). */
6911 if ((tmp = fold_strip_sign_ops (arg)))
6912 return build_call_expr_loc (loc, fndecl, 1, tmp);
6915 return NULL_TREE;
6918 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6919 Return NULL_TREE if no simplification can be made. */
6921 static tree
6922 fold_builtin_tan (tree arg, tree type)
6924 enum built_in_function fcode;
6925 tree res;
6927 if (!validate_arg (arg, REAL_TYPE))
6928 return NULL_TREE;
6930 /* Calculate the result when the argument is a constant. */
6931 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
6932 return res;
6934 /* Optimize tan(atan(x)) = x. */
6935 fcode = builtin_mathfn_code (arg);
6936 if (flag_unsafe_math_optimizations
6937 && (fcode == BUILT_IN_ATAN
6938 || fcode == BUILT_IN_ATANF
6939 || fcode == BUILT_IN_ATANL))
6940 return CALL_EXPR_ARG (arg, 0);
6942 return NULL_TREE;
6945 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
6946 NULL_TREE if no simplification can be made. */
6948 static tree
6949 fold_builtin_sincos (location_t loc,
6950 tree arg0, tree arg1, tree arg2)
6952 tree type;
6953 tree res, fn, call;
6955 if (!validate_arg (arg0, REAL_TYPE)
6956 || !validate_arg (arg1, POINTER_TYPE)
6957 || !validate_arg (arg2, POINTER_TYPE))
6958 return NULL_TREE;
6960 type = TREE_TYPE (arg0);
6962 /* Calculate the result when the argument is a constant. */
6963 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
6964 return res;
6966 /* Canonicalize sincos to cexpi. */
6967 if (!TARGET_C99_FUNCTIONS)
6968 return NULL_TREE;
6969 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
6970 if (!fn)
6971 return NULL_TREE;
6973 call = build_call_expr_loc (loc, fn, 1, arg0);
6974 call = builtin_save_expr (call);
6976 return build2 (COMPOUND_EXPR, void_type_node,
6977 build2 (MODIFY_EXPR, void_type_node,
6978 build_fold_indirect_ref_loc (loc, arg1),
6979 build1 (IMAGPART_EXPR, type, call)),
6980 build2 (MODIFY_EXPR, void_type_node,
6981 build_fold_indirect_ref_loc (loc, arg2),
6982 build1 (REALPART_EXPR, type, call)));
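/* For illustration only: the canonicalization above in source-like form,
   on a hypothetical call and assuming the target has the C99 functions so
   that the internal cexpi builtin is available:

       sincos (x, sp, cp);

   becomes approximately

       _Complex double t = cexpi (x);
       *sp = __imag__ t;
       *cp = __real__ t;
*/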
6985 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
6986 NULL_TREE if no simplification can be made. */
6988 static tree
6989 fold_builtin_cexp (location_t loc, tree arg0, tree type)
6991 tree rtype;
6992 tree realp, imagp, ifn;
6993 tree res;
6995 if (!validate_arg (arg0, COMPLEX_TYPE)
6996 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
6997 return NULL_TREE;
6999 /* Calculate the result when the argument is a constant. */
7000 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7001 return res;
7003 rtype = TREE_TYPE (TREE_TYPE (arg0));
7005 /* If we can figure out the real part of arg0 and it is constant zero,
7006 fold to cexpi. */
7007 if (!TARGET_C99_FUNCTIONS)
7008 return NULL_TREE;
7009 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7010 if (!ifn)
7011 return NULL_TREE;
7013 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7014 && real_zerop (realp))
7016 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7017 return build_call_expr_loc (loc, ifn, 1, narg);
7020 /* If we can easily decompose the real and imaginary parts, split cexp
7021 into exp (r) * cexpi (i). */
7022 if (flag_unsafe_math_optimizations
7023 && realp)
7025 tree rfn, rcall, icall;
7027 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7028 if (!rfn)
7029 return NULL_TREE;
7031 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7032 if (!imagp)
7033 return NULL_TREE;
7035 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7036 icall = builtin_save_expr (icall);
7037 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7038 rcall = builtin_save_expr (rcall);
7039 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7040 fold_build2_loc (loc, MULT_EXPR, rtype,
7041 rcall,
7042 fold_build1_loc (loc, REALPART_EXPR,
7043 rtype, icall)),
7044 fold_build2_loc (loc, MULT_EXPR, rtype,
7045 rcall,
7046 fold_build1_loc (loc, IMAGPART_EXPR,
7047 rtype, icall)));
7050 return NULL_TREE;
7053 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7054 Return NULL_TREE if no simplification can be made. */
7056 static tree
7057 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7059 if (!validate_arg (arg, REAL_TYPE))
7060 return NULL_TREE;
7062 /* Optimize trunc of constant value. */
7063 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7065 REAL_VALUE_TYPE r, x;
7066 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7068 x = TREE_REAL_CST (arg);
7069 real_trunc (&r, TYPE_MODE (type), &x);
7070 return build_real (type, r);
7073 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7076 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7077 Return NULL_TREE if no simplification can be made. */
7079 static tree
7080 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7082 if (!validate_arg (arg, REAL_TYPE))
7083 return NULL_TREE;
7085 /* Optimize floor of constant value. */
7086 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7088 REAL_VALUE_TYPE x;
7090 x = TREE_REAL_CST (arg);
7091 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7093 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7094 REAL_VALUE_TYPE r;
7096 real_floor (&r, TYPE_MODE (type), &x);
7097 return build_real (type, r);
7101 /* Fold floor (x) where x is nonnegative to trunc (x). */
7102 if (tree_expr_nonnegative_p (arg))
7104 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7105 if (truncfn)
7106 return build_call_expr_loc (loc, truncfn, 1, arg);
7109 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7112 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7113 Return NULL_TREE if no simplification can be made. */
7115 static tree
7116 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7118 if (!validate_arg (arg, REAL_TYPE))
7119 return NULL_TREE;
7121 /* Optimize ceil of constant value. */
7122 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7124 REAL_VALUE_TYPE x;
7126 x = TREE_REAL_CST (arg);
7127 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7129 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7130 REAL_VALUE_TYPE r;
7132 real_ceil (&r, TYPE_MODE (type), &x);
7133 return build_real (type, r);
7137 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7140 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7141 Return NULL_TREE if no simplification can be made. */
7143 static tree
7144 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7146 if (!validate_arg (arg, REAL_TYPE))
7147 return NULL_TREE;
7149 /* Optimize round of constant value. */
7150 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7152 REAL_VALUE_TYPE x;
7154 x = TREE_REAL_CST (arg);
7155 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7157 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7158 REAL_VALUE_TYPE r;
7160 real_round (&r, TYPE_MODE (type), &x);
7161 return build_real (type, r);
7165 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
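/* For illustration only, constant folding performed by the four folders
   above on hypothetical constant arguments:

       trunc (-2.5)  ->  -2.0          ceil (2.25)  ->  3.0
       floor (2.75)  ->   2.0          round (2.5)  ->  3.0

   In addition, floor (x) with x known to be nonnegative becomes trunc (x),
   which fold_trunc_transparent_mathfn may then narrow further.  */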
7168 /* Fold function call to builtin lround, lroundf or lroundl (or the
7169 corresponding long long versions) and other rounding functions. ARG
7170 is the argument to the call. Return NULL_TREE if no simplification
7171 can be made. */
7173 static tree
7174 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7176 if (!validate_arg (arg, REAL_TYPE))
7177 return NULL_TREE;
7179 /* Optimize lround of constant value. */
7180 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7182 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7184 if (real_isfinite (&x))
7186 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7187 tree ftype = TREE_TYPE (arg);
7188 double_int val;
7189 REAL_VALUE_TYPE r;
7191 switch (DECL_FUNCTION_CODE (fndecl))
7193 CASE_FLT_FN (BUILT_IN_LFLOOR):
7194 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7195 real_floor (&r, TYPE_MODE (ftype), &x);
7196 break;
7198 CASE_FLT_FN (BUILT_IN_LCEIL):
7199 CASE_FLT_FN (BUILT_IN_LLCEIL):
7200 real_ceil (&r, TYPE_MODE (ftype), &x);
7201 break;
7203 CASE_FLT_FN (BUILT_IN_LROUND):
7204 CASE_FLT_FN (BUILT_IN_LLROUND):
7205 real_round (&r, TYPE_MODE (ftype), &x);
7206 break;
7208 default:
7209 gcc_unreachable ();
7212 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7213 if (double_int_fits_to_tree_p (itype, val))
7214 return double_int_to_tree (itype, val);
7218 switch (DECL_FUNCTION_CODE (fndecl))
7220 CASE_FLT_FN (BUILT_IN_LFLOOR):
7221 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7222 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7223 if (tree_expr_nonnegative_p (arg))
7224 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7225 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7226 break;
7227 default:;
7230 return fold_fixed_mathfn (loc, fndecl, arg);
7233 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7234 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7235 the argument to the call. Return NULL_TREE if no simplification can
7236 be made. */
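/* A few concrete folds this performs on constant arguments
   (illustrative examples, not exhaustive):

     __builtin_popcount (0xf0) -> 4    four bits set
     __builtin_parity (7)      -> 1    three bits set, odd
     __builtin_ffs (8)         -> 4    lowest set bit is bit 3, 1-based
     __builtin_ctz (8)         -> 3
     __builtin_clz (1)         -> 31   for a 32-bit int

   clz/ctz of zero fold to the target-defined value when
   CLZ/CTZ_DEFINED_VALUE_AT_ZERO provides one, and to the type's
   precision otherwise.  */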
7238 static tree
7239 fold_builtin_bitop (tree fndecl, tree arg)
7241 if (!validate_arg (arg, INTEGER_TYPE))
7242 return NULL_TREE;
7244 /* Optimize for constant argument. */
7245 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7247 HOST_WIDE_INT hi, width, result;
7248 unsigned HOST_WIDE_INT lo;
7249 tree type;
7251 type = TREE_TYPE (arg);
7252 width = TYPE_PRECISION (type);
7253 lo = TREE_INT_CST_LOW (arg);
7255 /* Clear all the bits that are beyond the type's precision. */
7256 if (width > HOST_BITS_PER_WIDE_INT)
7258 hi = TREE_INT_CST_HIGH (arg);
7259 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7260 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7262 else
7264 hi = 0;
7265 if (width < HOST_BITS_PER_WIDE_INT)
7266 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7269 switch (DECL_FUNCTION_CODE (fndecl))
7271 CASE_INT_FN (BUILT_IN_FFS):
7272 if (lo != 0)
7273 result = ffs_hwi (lo);
7274 else if (hi != 0)
7275 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7276 else
7277 result = 0;
7278 break;
7280 CASE_INT_FN (BUILT_IN_CLZ):
7281 if (hi != 0)
7282 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7283 else if (lo != 0)
7284 result = width - floor_log2 (lo) - 1;
7285 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7286 result = width;
7287 break;
7289 CASE_INT_FN (BUILT_IN_CTZ):
7290 if (lo != 0)
7291 result = ctz_hwi (lo);
7292 else if (hi != 0)
7293 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7294 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7295 result = width;
7296 break;
7298 CASE_INT_FN (BUILT_IN_POPCOUNT):
7299 result = 0;
7300 while (lo)
7301 result++, lo &= lo - 1;
7302 while (hi)
7303 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7304 break;
7306 CASE_INT_FN (BUILT_IN_PARITY):
7307 result = 0;
7308 while (lo)
7309 result++, lo &= lo - 1;
7310 while (hi)
7311 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7312 result &= 1;
7313 break;
7315 default:
7316 gcc_unreachable ();
7319 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7322 return NULL_TREE;
7325 /* Fold function call to builtin_bswap and the long and long long
7326 variants. Return NULL_TREE if no simplification can be made. */
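/* For example, with a constant operand:

     __builtin_bswap32 (0x12345678)         -> 0x78563412
     __builtin_bswap64 (0x0102030405060708) -> 0x0807060504030201  */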
7327 static tree
7328 fold_builtin_bswap (tree fndecl, tree arg)
7330 if (! validate_arg (arg, INTEGER_TYPE))
7331 return NULL_TREE;
7333 /* Optimize constant value. */
7334 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7336 HOST_WIDE_INT hi, width, r_hi = 0;
7337 unsigned HOST_WIDE_INT lo, r_lo = 0;
7338 tree type;
7340 type = TREE_TYPE (arg);
7341 width = TYPE_PRECISION (type);
7342 lo = TREE_INT_CST_LOW (arg);
7343 hi = TREE_INT_CST_HIGH (arg);
7345 switch (DECL_FUNCTION_CODE (fndecl))
7347 case BUILT_IN_BSWAP32:
7348 case BUILT_IN_BSWAP64:
7350 int s;
7352 for (s = 0; s < width; s += 8)
7354 int d = width - s - 8;
7355 unsigned HOST_WIDE_INT byte;
7357 if (s < HOST_BITS_PER_WIDE_INT)
7358 byte = (lo >> s) & 0xff;
7359 else
7360 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7362 if (d < HOST_BITS_PER_WIDE_INT)
7363 r_lo |= byte << d;
7364 else
7365 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7369 break;
7371 default:
7372 gcc_unreachable ();
7375 if (width < HOST_BITS_PER_WIDE_INT)
7376 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7377 else
7378 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7381 return NULL_TREE;
7384 /* A subroutine of fold_builtin to fold the various logarithmic
7385 functions. Return NULL_TREE if no simplification can be made.
7386 FUNC is the corresponding MPFR logarithm function. */
7388 static tree
7389 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7390 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7392 if (validate_arg (arg, REAL_TYPE))
7394 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7395 tree res;
7396 const enum built_in_function fcode = builtin_mathfn_code (arg);
7398 /* Calculate the result when the argument is a constant. */
7399 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7400 return res;
7402 /* Special case, optimize logN(expN(x)) = x. */
7403 if (flag_unsafe_math_optimizations
7404 && ((func == mpfr_log
7405 && (fcode == BUILT_IN_EXP
7406 || fcode == BUILT_IN_EXPF
7407 || fcode == BUILT_IN_EXPL))
7408 || (func == mpfr_log2
7409 && (fcode == BUILT_IN_EXP2
7410 || fcode == BUILT_IN_EXP2F
7411 || fcode == BUILT_IN_EXP2L))
7412 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7413 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7415 /* Optimize logN(func()) for various exponential functions. We
7416 want to determine the value "x" and the power "exponent" in
7417 order to transform logN(x**exponent) into exponent*logN(x). */
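/* For instance, under -funsafe-math-optimizations:

     log (pow (x, 3.0)) -> 3.0 * log (x)
     log2 (sqrt (x))    -> 0.5 * log2 (x)
     log10 (cbrt (x))   -> (1.0/3.0) * log10 (x)  (constant truncated to TYPE)  */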
7418 if (flag_unsafe_math_optimizations)
7420 tree exponent = 0, x = 0;
7422 switch (fcode)
7424 CASE_FLT_FN (BUILT_IN_EXP):
7426 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
7426 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7427 dconst_e ()));
7428 exponent = CALL_EXPR_ARG (arg, 0);
7429 break;
7430 CASE_FLT_FN (BUILT_IN_EXP2):
7431 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
7432 x = build_real (type, dconst2);
7433 exponent = CALL_EXPR_ARG (arg, 0);
7434 break;
7435 CASE_FLT_FN (BUILT_IN_EXP10):
7436 CASE_FLT_FN (BUILT_IN_POW10):
7437 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
7439 REAL_VALUE_TYPE dconst10;
7440 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7441 x = build_real (type, dconst10);
7443 exponent = CALL_EXPR_ARG (arg, 0);
7444 break;
7445 CASE_FLT_FN (BUILT_IN_SQRT):
7446 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
7447 x = CALL_EXPR_ARG (arg, 0);
7448 exponent = build_real (type, dconsthalf);
7449 break;
7450 CASE_FLT_FN (BUILT_IN_CBRT):
7451 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
7452 x = CALL_EXPR_ARG (arg, 0);
7453 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7454 dconst_third ()));
7455 break;
7456 CASE_FLT_FN (BUILT_IN_POW):
7457 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
7458 x = CALL_EXPR_ARG (arg, 0);
7459 exponent = CALL_EXPR_ARG (arg, 1);
7460 break;
7461 default:
7462 break;
7465 /* Now perform the optimization. */
7466 if (x && exponent)
7468 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7469 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7474 return NULL_TREE;
7477 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7478 NULL_TREE if no simplification can be made. */
7480 static tree
7481 fold_builtin_hypot (location_t loc, tree fndecl,
7482 tree arg0, tree arg1, tree type)
7484 tree res, narg0, narg1;
7486 if (!validate_arg (arg0, REAL_TYPE)
7487 || !validate_arg (arg1, REAL_TYPE))
7488 return NULL_TREE;
7490 /* Calculate the result when the argument is a constant. */
7491 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7492 return res;
7494 /* If either argument to hypot has a negate or abs, strip that off.
7495 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7496 narg0 = fold_strip_sign_ops (arg0);
7497 narg1 = fold_strip_sign_ops (arg1);
7498 if (narg0 || narg1)
7500 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7501 narg1 ? narg1 : arg1);
7504 /* If either argument is zero, hypot is fabs of the other. */
7505 if (real_zerop (arg0))
7506 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7507 else if (real_zerop (arg1))
7508 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7510 /* hypot(x,x) -> fabs(x)*sqrt(2). */
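/* This follows from hypot(x,x) = sqrt(x*x + x*x) = fabs(x)*sqrt(2);
   it is guarded by -funsafe-math-optimizations since the rewritten
   expression need not be rounded exactly the way hypot would be.  */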
7511 if (flag_unsafe_math_optimizations
7512 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7514 const REAL_VALUE_TYPE sqrt2_trunc
7515 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7516 return fold_build2_loc (loc, MULT_EXPR, type,
7517 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7518 build_real (type, sqrt2_trunc));
7521 return NULL_TREE;
7525 /* Fold a builtin function call to pow, powf, or powl. Return
7526 NULL_TREE if no simplification can be made. */
7527 static tree
7528 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7530 tree res;
7532 if (!validate_arg (arg0, REAL_TYPE)
7533 || !validate_arg (arg1, REAL_TYPE))
7534 return NULL_TREE;
7536 /* Calculate the result when the argument is a constant. */
7537 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7538 return res;
7540 /* Optimize pow(1.0,y) = 1.0. */
7541 if (real_onep (arg0))
7542 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7544 if (TREE_CODE (arg1) == REAL_CST
7545 && !TREE_OVERFLOW (arg1))
7547 REAL_VALUE_TYPE cint;
7548 REAL_VALUE_TYPE c;
7549 HOST_WIDE_INT n;
7551 c = TREE_REAL_CST (arg1);
7553 /* Optimize pow(x,0.0) = 1.0. */
7554 if (REAL_VALUES_EQUAL (c, dconst0))
7555 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7556 arg0);
7558 /* Optimize pow(x,1.0) = x. */
7559 if (REAL_VALUES_EQUAL (c, dconst1))
7560 return arg0;
7562 /* Optimize pow(x,-1.0) = 1.0/x. */
7563 if (REAL_VALUES_EQUAL (c, dconstm1))
7564 return fold_build2_loc (loc, RDIV_EXPR, type,
7565 build_real (type, dconst1), arg0);
7567 /* Optimize pow(x,0.5) = sqrt(x). */
7568 if (flag_unsafe_math_optimizations
7569 && REAL_VALUES_EQUAL (c, dconsthalf))
7571 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7573 if (sqrtfn != NULL_TREE)
7574 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7577 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7578 if (flag_unsafe_math_optimizations)
7580 const REAL_VALUE_TYPE dconstroot
7581 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7583 if (REAL_VALUES_EQUAL (c, dconstroot))
7585 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7586 if (cbrtfn != NULL_TREE)
7587 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7591 /* Check for an integer exponent. */
7592 n = real_to_integer (&c);
7593 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7594 if (real_identical (&c, &cint))
7596 /* Attempt to evaluate pow at compile-time, unless this should
7597 raise an exception. */
7598 if (TREE_CODE (arg0) == REAL_CST
7599 && !TREE_OVERFLOW (arg0)
7600 && (n > 0
7601 || (!flag_trapping_math && !flag_errno_math)
7602 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7604 REAL_VALUE_TYPE x;
7605 bool inexact;
7607 x = TREE_REAL_CST (arg0);
7608 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7609 if (flag_unsafe_math_optimizations || !inexact)
7610 return build_real (type, x);
7613 /* Strip sign ops from even integer powers. */
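/* (-x)**n == x**n when n is even, so e.g. pow (-x, 2.0) and
   pow (fabs (x), 2.0) can both be rewritten as pow (x, 2.0)
   under -funsafe-math-optimizations.  */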
7614 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7616 tree narg0 = fold_strip_sign_ops (arg0);
7617 if (narg0)
7618 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7623 if (flag_unsafe_math_optimizations)
7625 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7627 /* Optimize pow(expN(x),y) = expN(x*y). */
7628 if (BUILTIN_EXPONENT_P (fcode))
7630 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7631 tree arg = CALL_EXPR_ARG (arg0, 0);
7632 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7633 return build_call_expr_loc (loc, expfn, 1, arg);
7636 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7637 if (BUILTIN_SQRT_P (fcode))
7639 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7640 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7641 build_real (type, dconsthalf));
7642 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7645 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7646 if (BUILTIN_CBRT_P (fcode))
7648 tree arg = CALL_EXPR_ARG (arg0, 0);
7649 if (tree_expr_nonnegative_p (arg))
7651 const REAL_VALUE_TYPE dconstroot
7652 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7653 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7654 build_real (type, dconstroot));
7655 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7659 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7660 if (fcode == BUILT_IN_POW
7661 || fcode == BUILT_IN_POWF
7662 || fcode == BUILT_IN_POWL)
7664 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7665 if (tree_expr_nonnegative_p (arg00))
7667 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7668 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7669 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7674 return NULL_TREE;
7677 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
7678 Return NULL_TREE if no simplification can be made. */
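/* Illustrative folds:

     __builtin_powi (x, 0)    -> 1.0  (x evaluated only for side effects)
     __builtin_powi (x, 1)    -> x
     __builtin_powi (x, -1)   -> 1.0 / x
     __builtin_powi (2.0, 10) -> 1024.0 at compile time  */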
7679 static tree
7680 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7681 tree arg0, tree arg1, tree type)
7683 if (!validate_arg (arg0, REAL_TYPE)
7684 || !validate_arg (arg1, INTEGER_TYPE))
7685 return NULL_TREE;
7687 /* Optimize pow(1.0,y) = 1.0. */
7688 if (real_onep (arg0))
7689 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7691 if (host_integerp (arg1, 0))
7693 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7695 /* Evaluate powi at compile-time. */
7696 if (TREE_CODE (arg0) == REAL_CST
7697 && !TREE_OVERFLOW (arg0))
7699 REAL_VALUE_TYPE x;
7700 x = TREE_REAL_CST (arg0);
7701 real_powi (&x, TYPE_MODE (type), &x, c);
7702 return build_real (type, x);
7705 /* Optimize pow(x,0) = 1.0. */
7706 if (c == 0)
7707 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7708 arg0);
7710 /* Optimize pow(x,1) = x. */
7711 if (c == 1)
7712 return arg0;
7714 /* Optimize pow(x,-1) = 1.0/x. */
7715 if (c == -1)
7716 return fold_build2_loc (loc, RDIV_EXPR, type,
7717 build_real (type, dconst1), arg0);
7720 return NULL_TREE;
7723 /* A subroutine of fold_builtin to fold the various exponent
7724 functions. Return NULL_TREE if no simplification can be made.
7725 FUNC is the corresponding MPFR exponent function. */
7727 static tree
7728 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7729 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7731 if (validate_arg (arg, REAL_TYPE))
7733 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7734 tree res;
7736 /* Calculate the result when the argument is a constant. */
7737 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7738 return res;
7740 /* Optimize expN(logN(x)) = x. */
7741 if (flag_unsafe_math_optimizations)
7743 const enum built_in_function fcode = builtin_mathfn_code (arg);
7745 if ((func == mpfr_exp
7746 && (fcode == BUILT_IN_LOG
7747 || fcode == BUILT_IN_LOGF
7748 || fcode == BUILT_IN_LOGL))
7749 || (func == mpfr_exp2
7750 && (fcode == BUILT_IN_LOG2
7751 || fcode == BUILT_IN_LOG2F
7752 || fcode == BUILT_IN_LOG2L))
7753 || (func == mpfr_exp10
7754 && (fcode == BUILT_IN_LOG10
7755 || fcode == BUILT_IN_LOG10F
7756 || fcode == BUILT_IN_LOG10L)))
7757 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7761 return NULL_TREE;
7764 /* Return true if VAR is a VAR_DECL or a component thereof. */
7766 static bool
7767 var_decl_component_p (tree var)
7769 tree inner = var;
7770 while (handled_component_p (inner))
7771 inner = TREE_OPERAND (inner, 0);
7772 return SSA_VAR_P (inner);
7775 /* Fold function call to builtin memset. Return
7776 NULL_TREE if no simplification can be made. */
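/* The interesting case is a fixed-size memset of a scalar object that
   can become a single store.  For example (assuming the size and
   alignment checks below succeed):

     int i;
     memset (&i, 0, sizeof (int));    becomes   i = 0;
     unsigned char c;
     memset (&c, 0xab, 1);            becomes   c = 0xab;  */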
7778 static tree
7779 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
7780 tree type, bool ignore)
7782 tree var, ret, etype;
7783 unsigned HOST_WIDE_INT length, cval;
7785 if (! validate_arg (dest, POINTER_TYPE)
7786 || ! validate_arg (c, INTEGER_TYPE)
7787 || ! validate_arg (len, INTEGER_TYPE))
7788 return NULL_TREE;
7790 if (! host_integerp (len, 1))
7791 return NULL_TREE;
7793 /* If the LEN parameter is zero, return DEST. */
7794 if (integer_zerop (len))
7795 return omit_one_operand_loc (loc, type, dest, c);
7797 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
7798 return NULL_TREE;
7800 var = dest;
7801 STRIP_NOPS (var);
7802 if (TREE_CODE (var) != ADDR_EXPR)
7803 return NULL_TREE;
7805 var = TREE_OPERAND (var, 0);
7806 if (TREE_THIS_VOLATILE (var))
7807 return NULL_TREE;
7809 etype = TREE_TYPE (var);
7810 if (TREE_CODE (etype) == ARRAY_TYPE)
7811 etype = TREE_TYPE (etype);
7813 if (!INTEGRAL_TYPE_P (etype)
7814 && !POINTER_TYPE_P (etype))
7815 return NULL_TREE;
7817 if (! var_decl_component_p (var))
7818 return NULL_TREE;
7820 length = tree_low_cst (len, 1);
7821 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
7822 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
7823 < length)
7824 return NULL_TREE;
7826 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
7827 return NULL_TREE;
7829 if (integer_zerop (c))
7830 cval = 0;
7831 else
7833 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
7834 return NULL_TREE;
7836 cval = TREE_INT_CST_LOW (c);
7837 cval &= 0xff;
7838 cval |= cval << 8;
7839 cval |= cval << 16;
7840 cval |= (cval << 31) << 1;
7843 ret = build_int_cst_type (etype, cval);
7844 var = build_fold_indirect_ref_loc (loc,
7845 fold_convert_loc (loc,
7846 build_pointer_type (etype),
7847 dest));
7848 ret = build2 (MODIFY_EXPR, etype, var, ret);
7849 if (ignore)
7850 return ret;
7852 return omit_one_operand_loc (loc, type, dest, ret);
7855 /* Fold function call to builtin bzero. Return
7856 NULL_TREE if no simplification can be made. */
7858 static tree
7859 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
7861 if (! validate_arg (dest, POINTER_TYPE)
7862 || ! validate_arg (size, INTEGER_TYPE))
7863 return NULL_TREE;
7865 if (!ignore)
7866 return NULL_TREE;
7868 /* New argument list transforming bzero(ptr x, int y) to
7869 memset(ptr x, int 0, size_t y). This is done this way
7870 so that if it isn't expanded inline, we fall back to
7871 calling bzero instead of memset. */
7873 return fold_builtin_memset (loc, dest, integer_zero_node,
7874 fold_convert_loc (loc, sizetype, size),
7875 void_type_node, ignore);
7878 /* Fold function call to builtin mem{{,p}cpy,move}. Return
7879 NULL_TREE if no simplification can be made.
7880 If ENDP is 0, return DEST (like memcpy).
7881 If ENDP is 1, return DEST+LEN (like mempcpy).
7882 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
7883 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
7884 (memmove). */
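/* When everything lines up (known length equal to the operand type's
   size, adequate alignment, no overlap concerns), the copy itself is
   folded to a single assignment.  For example:

     struct S a, b;
     memcpy (&a, &b, sizeof (struct S));   can become   a = b;

   and for mempcpy the returned value is still DEST + LEN.  */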
7886 static tree
7887 fold_builtin_memory_op (location_t loc, tree dest, tree src,
7888 tree len, tree type, bool ignore, int endp)
7890 tree destvar, srcvar, expr;
7892 if (! validate_arg (dest, POINTER_TYPE)
7893 || ! validate_arg (src, POINTER_TYPE)
7894 || ! validate_arg (len, INTEGER_TYPE))
7895 return NULL_TREE;
7897 /* If the LEN parameter is zero, return DEST. */
7898 if (integer_zerop (len))
7899 return omit_one_operand_loc (loc, type, dest, src);
7901 /* If SRC and DEST are the same (and not volatile), return
7902 DEST{,+LEN,+LEN-1}. */
7903 if (operand_equal_p (src, dest, 0))
7904 expr = len;
7905 else
7907 tree srctype, desttype;
7908 unsigned int src_align, dest_align;
7909 tree off0;
7911 if (endp == 3)
7913 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
7914 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
7916 /* Both DEST and SRC must be pointer types.
7917 ??? This is what old code did. Is the testing for pointer types
7918 really mandatory?
7920 If either SRC is readonly or length is 1, we can use memcpy. */
7921 if (!dest_align || !src_align)
7922 return NULL_TREE;
7923 if (readonly_data_expr (src)
7924 || (host_integerp (len, 1)
7925 && (MIN (src_align, dest_align) / BITS_PER_UNIT
7926 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
7928 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7929 if (!fn)
7930 return NULL_TREE;
7931 return build_call_expr_loc (loc, fn, 3, dest, src, len);
7934 /* If *src and *dest can't overlap, optimize into memcpy as well. */
7935 if (TREE_CODE (src) == ADDR_EXPR
7936 && TREE_CODE (dest) == ADDR_EXPR)
7938 tree src_base, dest_base, fn;
7939 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
7940 HOST_WIDE_INT size = -1;
7941 HOST_WIDE_INT maxsize = -1;
7943 srcvar = TREE_OPERAND (src, 0);
7944 src_base = get_ref_base_and_extent (srcvar, &src_offset,
7945 &size, &maxsize);
7946 destvar = TREE_OPERAND (dest, 0);
7947 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
7948 &size, &maxsize);
7949 if (host_integerp (len, 1))
7950 maxsize = tree_low_cst (len, 1);
7951 else
7952 maxsize = -1;
7953 src_offset /= BITS_PER_UNIT;
7954 dest_offset /= BITS_PER_UNIT;
7955 if (SSA_VAR_P (src_base)
7956 && SSA_VAR_P (dest_base))
7958 if (operand_equal_p (src_base, dest_base, 0)
7959 && ranges_overlap_p (src_offset, maxsize,
7960 dest_offset, maxsize))
7961 return NULL_TREE;
7963 else if (TREE_CODE (src_base) == MEM_REF
7964 && TREE_CODE (dest_base) == MEM_REF)
7966 double_int off;
7967 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
7968 TREE_OPERAND (dest_base, 0), 0))
7969 return NULL_TREE;
7970 off = double_int_add (mem_ref_offset (src_base),
7971 shwi_to_double_int (src_offset));
7972 if (!double_int_fits_in_shwi_p (off))
7973 return NULL_TREE;
7974 src_offset = off.low;
7975 off = double_int_add (mem_ref_offset (dest_base),
7976 shwi_to_double_int (dest_offset));
7977 if (!double_int_fits_in_shwi_p (off))
7978 return NULL_TREE;
7979 dest_offset = off.low;
7980 if (ranges_overlap_p (src_offset, maxsize,
7981 dest_offset, maxsize))
7982 return NULL_TREE;
7984 else
7985 return NULL_TREE;
7987 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
7988 if (!fn)
7989 return NULL_TREE;
7990 return build_call_expr_loc (loc, fn, 3, dest, src, len);
7993 /* If the destination and source do not alias, optimize into
7994 memcpy as well. */
7995 if ((is_gimple_min_invariant (dest)
7996 || TREE_CODE (dest) == SSA_NAME)
7997 && (is_gimple_min_invariant (src)
7998 || TREE_CODE (src) == SSA_NAME))
8000 ao_ref destr, srcr;
8001 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8002 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8003 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8005 tree fn;
8006 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8007 if (!fn)
8008 return NULL_TREE;
8009 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8013 return NULL_TREE;
8016 if (!host_integerp (len, 0))
8017 return NULL_TREE;
8018 /* FIXME:
8019 This logic loses for arguments like (type *)malloc (sizeof (type)),
8020 since we strip the casts off the VOID * value returned from malloc.
8021 Perhaps we ought to inherit the type from the non-VOID argument here? */
8022 STRIP_NOPS (src);
8023 STRIP_NOPS (dest);
8024 if (!POINTER_TYPE_P (TREE_TYPE (src))
8025 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8026 return NULL_TREE;
8027 /* As we fold (void *)(p + CST) to (void *)p + CST, undo that here. */
8028 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8030 tree tem = TREE_OPERAND (src, 0);
8031 STRIP_NOPS (tem);
8032 if (tem != TREE_OPERAND (src, 0))
8033 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8035 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8037 tree tem = TREE_OPERAND (dest, 0);
8038 STRIP_NOPS (tem);
8039 if (tem != TREE_OPERAND (dest, 0))
8040 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8042 srctype = TREE_TYPE (TREE_TYPE (src));
8043 if (TREE_CODE (srctype) == ARRAY_TYPE
8044 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8046 srctype = TREE_TYPE (srctype);
8047 STRIP_NOPS (src);
8048 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8050 desttype = TREE_TYPE (TREE_TYPE (dest));
8051 if (TREE_CODE (desttype) == ARRAY_TYPE
8052 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8054 desttype = TREE_TYPE (desttype);
8055 STRIP_NOPS (dest);
8056 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8058 if (TREE_ADDRESSABLE (srctype)
8059 || TREE_ADDRESSABLE (desttype))
8060 return NULL_TREE;
8062 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8063 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8064 if (dest_align < TYPE_ALIGN (desttype)
8065 || src_align < TYPE_ALIGN (srctype))
8066 return NULL_TREE;
8068 if (!ignore)
8069 dest = builtin_save_expr (dest);
8071 /* Build accesses at offset zero with a ref-all character type. */
8072 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8073 ptr_mode, true), 0);
8075 destvar = dest;
8076 STRIP_NOPS (destvar);
8077 if (TREE_CODE (destvar) == ADDR_EXPR
8078 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8079 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8080 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8081 else
8082 destvar = NULL_TREE;
8084 srcvar = src;
8085 STRIP_NOPS (srcvar);
8086 if (TREE_CODE (srcvar) == ADDR_EXPR
8087 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8088 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8090 if (!destvar
8091 || src_align >= TYPE_ALIGN (desttype))
8092 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8093 srcvar, off0);
8094 else if (!STRICT_ALIGNMENT)
8096 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8097 src_align);
8098 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8100 else
8101 srcvar = NULL_TREE;
8103 else
8104 srcvar = NULL_TREE;
8106 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8107 return NULL_TREE;
8109 if (srcvar == NULL_TREE)
8111 STRIP_NOPS (src);
8112 if (src_align >= TYPE_ALIGN (desttype))
8113 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8114 else
8116 if (STRICT_ALIGNMENT)
8117 return NULL_TREE;
8118 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8119 src_align);
8120 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8123 else if (destvar == NULL_TREE)
8125 STRIP_NOPS (dest);
8126 if (dest_align >= TYPE_ALIGN (srctype))
8127 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8128 else
8130 if (STRICT_ALIGNMENT)
8131 return NULL_TREE;
8132 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8133 dest_align);
8134 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8138 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8141 if (ignore)
8142 return expr;
8144 if (endp == 0 || endp == 3)
8145 return omit_one_operand_loc (loc, type, dest, expr);
8147 if (expr == len)
8148 expr = NULL_TREE;
8150 if (endp == 2)
8151 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8152 ssize_int (1));
8154 len = fold_convert_loc (loc, sizetype, len);
8155 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8156 dest = fold_convert_loc (loc, type, dest);
8157 if (expr)
8158 dest = omit_one_operand_loc (loc, type, dest, expr);
8159 return dest;
8162 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8163 If LEN is not NULL, it represents the length of the string to be
8164 copied. Return NULL_TREE if no simplification can be made. */
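/* For example, when not optimizing for size and the source length is
   known, strcpy (d, "hi") becomes memcpy (d, "hi", 3) -- the length
   plus one for the terminating nul -- with the result converted back
   to the strcpy return type.  */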
8166 tree
8167 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8169 tree fn;
8171 if (!validate_arg (dest, POINTER_TYPE)
8172 || !validate_arg (src, POINTER_TYPE))
8173 return NULL_TREE;
8175 /* If SRC and DEST are the same (and not volatile), return DEST. */
8176 if (operand_equal_p (src, dest, 0))
8177 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8179 if (optimize_function_for_size_p (cfun))
8180 return NULL_TREE;
8182 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8183 if (!fn)
8184 return NULL_TREE;
8186 if (!len)
8188 len = c_strlen (src, 1);
8189 if (! len || TREE_SIDE_EFFECTS (len))
8190 return NULL_TREE;
8193 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8194 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8195 build_call_expr_loc (loc, fn, 3, dest, src, len));
8198 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8199 Return NULL_TREE if no simplification can be made. */
8201 static tree
8202 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8204 tree fn, len, lenp1, call, type;
8206 if (!validate_arg (dest, POINTER_TYPE)
8207 || !validate_arg (src, POINTER_TYPE))
8208 return NULL_TREE;
8210 len = c_strlen (src, 1);
8211 if (!len
8212 || TREE_CODE (len) != INTEGER_CST)
8213 return NULL_TREE;
8215 if (optimize_function_for_size_p (cfun)
8216 /* If length is zero it's small enough. */
8217 && !integer_zerop (len))
8218 return NULL_TREE;
8220 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8221 if (!fn)
8222 return NULL_TREE;
8224 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8225 /* We use dest twice in building our expression. Save it from
8226 multiple expansions. */
8227 dest = builtin_save_expr (dest);
8228 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8230 type = TREE_TYPE (TREE_TYPE (fndecl));
8231 len = fold_convert_loc (loc, sizetype, len);
8232 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8233 dest = fold_convert_loc (loc, type, dest);
8234 dest = omit_one_operand_loc (loc, type, dest, call);
8235 return dest;
8238 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8239 If SLEN is not NULL, it represents the length of the source string.
8240 Return NULL_TREE if no simplification can be made. */
8242 tree
8243 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8244 tree src, tree len, tree slen)
8246 tree fn;
8248 if (!validate_arg (dest, POINTER_TYPE)
8249 || !validate_arg (src, POINTER_TYPE)
8250 || !validate_arg (len, INTEGER_TYPE))
8251 return NULL_TREE;
8253 /* If the LEN parameter is zero, return DEST. */
8254 if (integer_zerop (len))
8255 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8257 /* We can't compare slen with len as constants below if len is not a
8258 constant. */
8259 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8260 return NULL_TREE;
8262 if (!slen)
8263 slen = c_strlen (src, 1);
8265 /* Now, we must be passed a constant src ptr parameter. */
8266 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8267 return NULL_TREE;
8269 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8271 /* We do not support simplification of this case, though we do
8272 support it when expanding trees into RTL. */
8273 /* FIXME: generate a call to __builtin_memset. */
8274 if (tree_int_cst_lt (slen, len))
8275 return NULL_TREE;
8277 /* OK, transform into builtin memcpy. */
8278 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8279 if (!fn)
8280 return NULL_TREE;
8281 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8282 build_call_expr_loc (loc, fn, 3, dest, src, len));
8285 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8286 arguments to the call, and TYPE is its return type.
8287 Return NULL_TREE if no simplification can be made. */
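/* With a constant string, a constant character and a length that does
   not read past the string, the call folds at compile time, e.g.

     memchr ("hello", 'l', 5) -> "hello" + 2
     memchr ("hello", 'z', 5) -> a null pointer constant  */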
8289 static tree
8290 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8292 if (!validate_arg (arg1, POINTER_TYPE)
8293 || !validate_arg (arg2, INTEGER_TYPE)
8294 || !validate_arg (len, INTEGER_TYPE))
8295 return NULL_TREE;
8296 else
8298 const char *p1;
8300 if (TREE_CODE (arg2) != INTEGER_CST
8301 || !host_integerp (len, 1))
8302 return NULL_TREE;
8304 p1 = c_getstr (arg1);
8305 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8307 char c;
8308 const char *r;
8309 tree tem;
8311 if (target_char_cast (arg2, &c))
8312 return NULL_TREE;
8314 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8316 if (r == NULL)
8317 return build_int_cst (TREE_TYPE (arg1), 0);
8319 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8320 size_int (r - p1));
8321 return fold_convert_loc (loc, type, tem);
8323 return NULL_TREE;
8327 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8328 Return NULL_TREE if no simplification can be made. */
8330 static tree
8331 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8333 const char *p1, *p2;
8335 if (!validate_arg (arg1, POINTER_TYPE)
8336 || !validate_arg (arg2, POINTER_TYPE)
8337 || !validate_arg (len, INTEGER_TYPE))
8338 return NULL_TREE;
8340 /* If the LEN parameter is zero, return zero. */
8341 if (integer_zerop (len))
8342 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8343 arg1, arg2);
8345 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8346 if (operand_equal_p (arg1, arg2, 0))
8347 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8349 p1 = c_getstr (arg1);
8350 p2 = c_getstr (arg2);
8352 /* If all arguments are constant, and the value of len is not greater
8353 than the lengths of arg1 and arg2, evaluate at compile-time. */
8354 if (host_integerp (len, 1) && p1 && p2
8355 && compare_tree_int (len, strlen (p1) + 1) <= 0
8356 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8358 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8360 if (r > 0)
8361 return integer_one_node;
8362 else if (r < 0)
8363 return integer_minus_one_node;
8364 else
8365 return integer_zero_node;
8368 /* If the LEN parameter is one, return an expression corresponding to
8369 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8370 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8372 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8373 tree cst_uchar_ptr_node
8374 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8376 tree ind1
8377 = fold_convert_loc (loc, integer_type_node,
8378 build1 (INDIRECT_REF, cst_uchar_node,
8379 fold_convert_loc (loc,
8380 cst_uchar_ptr_node,
8381 arg1)));
8382 tree ind2
8383 = fold_convert_loc (loc, integer_type_node,
8384 build1 (INDIRECT_REF, cst_uchar_node,
8385 fold_convert_loc (loc,
8386 cst_uchar_ptr_node,
8387 arg2)));
8388 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8391 return NULL_TREE;
8394 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8395 Return NULL_TREE if no simplification can be made. */
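/* Typical folds (note the result is canonicalized to -1/0/1 rather
   than an arbitrary difference):

     strcmp (s, s)      -> 0
     strcmp ("a", "b")  -> -1,   strcmp ("b", "a") -> 1
     strcmp (s, "")     -> *(const unsigned char *) s  */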
8397 static tree
8398 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8400 const char *p1, *p2;
8402 if (!validate_arg (arg1, POINTER_TYPE)
8403 || !validate_arg (arg2, POINTER_TYPE))
8404 return NULL_TREE;
8406 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8407 if (operand_equal_p (arg1, arg2, 0))
8408 return integer_zero_node;
8410 p1 = c_getstr (arg1);
8411 p2 = c_getstr (arg2);
8413 if (p1 && p2)
8415 const int i = strcmp (p1, p2);
8416 if (i < 0)
8417 return integer_minus_one_node;
8418 else if (i > 0)
8419 return integer_one_node;
8420 else
8421 return integer_zero_node;
8424 /* If the second arg is "", return *(const unsigned char*)arg1. */
8425 if (p2 && *p2 == '\0')
8427 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8428 tree cst_uchar_ptr_node
8429 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8431 return fold_convert_loc (loc, integer_type_node,
8432 build1 (INDIRECT_REF, cst_uchar_node,
8433 fold_convert_loc (loc,
8434 cst_uchar_ptr_node,
8435 arg1)));
8438 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8439 if (p1 && *p1 == '\0')
8441 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8442 tree cst_uchar_ptr_node
8443 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8445 tree temp
8446 = fold_convert_loc (loc, integer_type_node,
8447 build1 (INDIRECT_REF, cst_uchar_node,
8448 fold_convert_loc (loc,
8449 cst_uchar_ptr_node,
8450 arg2)));
8451 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8454 return NULL_TREE;
8457 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8458 Return NULL_TREE if no simplification can be made. */
8460 static tree
8461 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8463 const char *p1, *p2;
8465 if (!validate_arg (arg1, POINTER_TYPE)
8466 || !validate_arg (arg2, POINTER_TYPE)
8467 || !validate_arg (len, INTEGER_TYPE))
8468 return NULL_TREE;
8470 /* If the LEN parameter is zero, return zero. */
8471 if (integer_zerop (len))
8472 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8473 arg1, arg2);
8475 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8476 if (operand_equal_p (arg1, arg2, 0))
8477 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8479 p1 = c_getstr (arg1);
8480 p2 = c_getstr (arg2);
8482 if (host_integerp (len, 1) && p1 && p2)
8484 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8485 if (i > 0)
8486 return integer_one_node;
8487 else if (i < 0)
8488 return integer_minus_one_node;
8489 else
8490 return integer_zero_node;
8493 /* If the second arg is "", and the length is greater than zero,
8494 return *(const unsigned char*)arg1. */
8495 if (p2 && *p2 == '\0'
8496 && TREE_CODE (len) == INTEGER_CST
8497 && tree_int_cst_sgn (len) == 1)
8499 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8500 tree cst_uchar_ptr_node
8501 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8503 return fold_convert_loc (loc, integer_type_node,
8504 build1 (INDIRECT_REF, cst_uchar_node,
8505 fold_convert_loc (loc,
8506 cst_uchar_ptr_node,
8507 arg1)));
8510 /* If the first arg is "", and the length is greater than zero,
8511 return -*(const unsigned char*)arg2. */
8512 if (p1 && *p1 == '\0'
8513 && TREE_CODE (len) == INTEGER_CST
8514 && tree_int_cst_sgn (len) == 1)
8516 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8517 tree cst_uchar_ptr_node
8518 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8520 tree temp = fold_convert_loc (loc, integer_type_node,
8521 build1 (INDIRECT_REF, cst_uchar_node,
8522 fold_convert_loc (loc,
8523 cst_uchar_ptr_node,
8524 arg2)));
8525 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8528 /* If the LEN parameter is one, return an expression corresponding to
8529 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8530 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8532 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8533 tree cst_uchar_ptr_node
8534 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8536 tree ind1 = fold_convert_loc (loc, integer_type_node,
8537 build1 (INDIRECT_REF, cst_uchar_node,
8538 fold_convert_loc (loc,
8539 cst_uchar_ptr_node,
8540 arg1)));
8541 tree ind2 = fold_convert_loc (loc, integer_type_node,
8542 build1 (INDIRECT_REF, cst_uchar_node,
8543 fold_convert_loc (loc,
8544 cst_uchar_ptr_node,
8545 arg2)));
8546 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8549 return NULL_TREE;
8552 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8553 ARG. Return NULL_TREE if no simplification can be made. */
8555 static tree
8556 fold_builtin_signbit (location_t loc, tree arg, tree type)
8558 if (!validate_arg (arg, REAL_TYPE))
8559 return NULL_TREE;
8561 /* If ARG is a compile-time constant, determine the result. */
8562 if (TREE_CODE (arg) == REAL_CST
8563 && !TREE_OVERFLOW (arg))
8565 REAL_VALUE_TYPE c;
8567 c = TREE_REAL_CST (arg);
8568 return (REAL_VALUE_NEGATIVE (c)
8569 ? build_one_cst (type)
8570 : build_zero_cst (type));
8573 /* If ARG is non-negative, the result is always zero. */
8574 if (tree_expr_nonnegative_p (arg))
8575 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8577 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
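/* The comparison is not equivalent in general: signbit (-0.0) is
   nonzero while -0.0 < 0.0 is false, so this form is only usable
   when the format has no signed zeros.  */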
8578 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8579 return fold_build2_loc (loc, LT_EXPR, type, arg,
8580 build_real (TREE_TYPE (arg), dconst0));
8582 return NULL_TREE;
8585 /* Fold function call to builtin copysign, copysignf or copysignl with
8586 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8587 be made. */
8589 static tree
8590 fold_builtin_copysign (location_t loc, tree fndecl,
8591 tree arg1, tree arg2, tree type)
8593 tree tem;
8595 if (!validate_arg (arg1, REAL_TYPE)
8596 || !validate_arg (arg2, REAL_TYPE))
8597 return NULL_TREE;
8599 /* copysign(X,X) is X. */
8600 if (operand_equal_p (arg1, arg2, 0))
8601 return fold_convert_loc (loc, type, arg1);
8603 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8604 if (TREE_CODE (arg1) == REAL_CST
8605 && TREE_CODE (arg2) == REAL_CST
8606 && !TREE_OVERFLOW (arg1)
8607 && !TREE_OVERFLOW (arg2))
8609 REAL_VALUE_TYPE c1, c2;
8611 c1 = TREE_REAL_CST (arg1);
8612 c2 = TREE_REAL_CST (arg2);
8613 /* c1.sign := c2.sign. */
8614 real_copysign (&c1, &c2);
8615 return build_real (type, c1);
8618 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8619 Remember to evaluate Y for side-effects. */
8620 if (tree_expr_nonnegative_p (arg2))
8621 return omit_one_operand_loc (loc, type,
8622 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8623 arg2);
8625 /* Strip sign changing operations for the first argument. */
8626 tem = fold_strip_sign_ops (arg1);
8627 if (tem)
8628 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8630 return NULL_TREE;
8633 /* Fold a call to builtin isascii with argument ARG. */
8635 static tree
8636 fold_builtin_isascii (location_t loc, tree arg)
8638 if (!validate_arg (arg, INTEGER_TYPE))
8639 return NULL_TREE;
8640 else
8642 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8643 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8644 build_int_cst (integer_type_node,
8645 ~ (unsigned HOST_WIDE_INT) 0x7f));
8646 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8647 arg, integer_zero_node);
8651 /* Fold a call to builtin toascii with argument ARG. */
8653 static tree
8654 fold_builtin_toascii (location_t loc, tree arg)
8656 if (!validate_arg (arg, INTEGER_TYPE))
8657 return NULL_TREE;
8659 /* Transform toascii(c) -> (c & 0x7f). */
8660 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8661 build_int_cst (integer_type_node, 0x7f));
8664 /* Fold a call to builtin isdigit with argument ARG. */
8666 static tree
8667 fold_builtin_isdigit (location_t loc, tree arg)
8669 if (!validate_arg (arg, INTEGER_TYPE))
8670 return NULL_TREE;
8671 else
8673 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8674 /* According to the C standard, isdigit is unaffected by locale.
8675 However, it definitely is affected by the target character set. */
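/* The single unsigned comparison works because for c below '0' the
   subtraction wraps around to a large value, failing <= 9 just as
   values above '9' do; since the digits '0'..'9' are required to be
   contiguous in the execution character set, the trick is safe.  */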
8676 unsigned HOST_WIDE_INT target_digit0
8677 = lang_hooks.to_target_charset ('0');
8679 if (target_digit0 == 0)
8680 return NULL_TREE;
8682 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8683 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8684 build_int_cst (unsigned_type_node, target_digit0));
8685 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8686 build_int_cst (unsigned_type_node, 9));
8690 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8692 static tree
8693 fold_builtin_fabs (location_t loc, tree arg, tree type)
8695 if (!validate_arg (arg, REAL_TYPE))
8696 return NULL_TREE;
8698 arg = fold_convert_loc (loc, type, arg);
8699 if (TREE_CODE (arg) == REAL_CST)
8700 return fold_abs_const (arg, type);
8701 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8704 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8706 static tree
8707 fold_builtin_abs (location_t loc, tree arg, tree type)
8709 if (!validate_arg (arg, INTEGER_TYPE))
8710 return NULL_TREE;
8712 arg = fold_convert_loc (loc, type, arg);
8713 if (TREE_CODE (arg) == INTEGER_CST)
8714 return fold_abs_const (arg, type);
8715 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8718 /* Fold a fma operation with arguments ARG[012]. */
8720 tree
8721 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8722 tree type, tree arg0, tree arg1, tree arg2)
8724 if (TREE_CODE (arg0) == REAL_CST
8725 && TREE_CODE (arg1) == REAL_CST
8726 && TREE_CODE (arg2) == REAL_CST)
8727 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8729 return NULL_TREE;
8732 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8734 static tree
8735 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8737 if (validate_arg (arg0, REAL_TYPE)
8738 && validate_arg(arg1, REAL_TYPE)
8739 && validate_arg(arg2, REAL_TYPE))
8741 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8742 if (tem)
8743 return tem;
8745 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8746 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8747 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8749 return NULL_TREE;
8752 /* Fold a call to builtin fmin or fmax. */
8754 static tree
8755 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8756 tree type, bool max)
8758 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8760 /* Calculate the result when the argument is a constant. */
8761 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8763 if (res)
8764 return res;
8766 /* If either argument is NaN, return the other one. Avoid the
8767 transformation if we get (and honor) a signalling NaN. Using
8768 omit_one_operand() ensures we create a non-lvalue. */
8769 if (TREE_CODE (arg0) == REAL_CST
8770 && real_isnan (&TREE_REAL_CST (arg0))
8771 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8772 || ! TREE_REAL_CST (arg0).signalling))
8773 return omit_one_operand_loc (loc, type, arg1, arg0);
8774 if (TREE_CODE (arg1) == REAL_CST
8775 && real_isnan (&TREE_REAL_CST (arg1))
8776 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8777 || ! TREE_REAL_CST (arg1).signalling))
8778 return omit_one_operand_loc (loc, type, arg0, arg1);
8780 /* Transform fmin/fmax(x,x) -> x. */
8781 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8782 return omit_one_operand_loc (loc, type, arg0, arg1);
8784 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8785 functions to return the numeric arg if the other one is NaN.
8786 These tree codes don't honor that, so only transform if
8787 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8788 handled, so we don't have to worry about it either. */
8789 if (flag_finite_math_only)
8790 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8791 fold_convert_loc (loc, type, arg0),
8792 fold_convert_loc (loc, type, arg1));
8794 return NULL_TREE;
8797 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8799 static tree
8800 fold_builtin_carg (location_t loc, tree arg, tree type)
8802 if (validate_arg (arg, COMPLEX_TYPE)
8803 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8805 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8807 if (atan2_fn)
8809 tree new_arg = builtin_save_expr (arg);
8810 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8811 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8812 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8816 return NULL_TREE;
8819 /* Fold a call to builtin logb/ilogb. */
8821 static tree
8822 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8824 if (! validate_arg (arg, REAL_TYPE))
8825 return NULL_TREE;
8827 STRIP_NOPS (arg);
8829 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8831 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8833 switch (value->cl)
8835 case rvc_nan:
8836 case rvc_inf:
8837 /* If arg is Inf or NaN and we're logb, return it. */
8838 if (TREE_CODE (rettype) == REAL_TYPE)
8839 return fold_convert_loc (loc, rettype, arg);
8840 /* Fall through... */
8841 case rvc_zero:
8842 /* Zero may set errno and/or raise an exception for logb; also,
8843 for ilogb we don't know FP_ILOGB0. */
8844 return NULL_TREE;
8845 case rvc_normal:
8846 /* For normal numbers, proceed iff radix == 2. In GCC,
8847 normalized significands are in the range [0.5, 1.0). We
8848 want the exponent as if they were [1.0, 2.0) so get the
8849 exponent and subtract 1. */
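/* For example, 8.0 is represented as 0.5 * 2**4, so REAL_EXP is 4
   and logb (8.0) folds to 3.0 (ilogb (8.0) to 3).  */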
8850 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8851 return fold_convert_loc (loc, rettype,
8852 build_int_cst (integer_type_node,
8853 REAL_EXP (value)-1));
8854 break;
8858 return NULL_TREE;
8861 /* Fold a call to builtin significand, if radix == 2. */
8863 static tree
8864 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8866 if (! validate_arg (arg, REAL_TYPE))
8867 return NULL_TREE;
8869 STRIP_NOPS (arg);
8871 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8873 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8875 switch (value->cl)
8877 case rvc_zero:
8878 case rvc_nan:
8879 case rvc_inf:
8880 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8881 return fold_convert_loc (loc, rettype, arg);
8882 case rvc_normal:
8883 /* For normal numbers, proceed iff radix == 2. */
8884 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8886 REAL_VALUE_TYPE result = *value;
8887 /* In GCC, normalized significands are in the range [0.5,
8888 1.0). We want them to be [1.0, 2.0) so set the
8889 exponent to 1. */
8890 SET_REAL_EXP (&result, 1);
8891 return build_real (rettype, result);
8893 break;
8897 return NULL_TREE;
8900 /* Fold a call to builtin frexp; we can assume the base is 2. */
8902 static tree
8903 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8905 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8906 return NULL_TREE;
8908 STRIP_NOPS (arg0);
8910 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8911 return NULL_TREE;
8913 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8915 /* Proceed if a valid pointer type was passed in. */
8916 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8918 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8919 tree frac, exp;
8921 switch (value->cl)
8923 case rvc_zero:
8924 /* For +-0, return (*exp = 0, +-0). */
8925 exp = integer_zero_node;
8926 frac = arg0;
8927 break;
8928 case rvc_nan:
8929 case rvc_inf:
8930 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8931 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8932 case rvc_normal:
8934 /* Since the frexp function always expects base 2, and in
8935 GCC normalized significands are already in the range
8936 [0.5, 1.0), we have exactly what frexp wants. */
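/* E.g. for 8.0 = 0.5 * 2**4 this yields frac = 0.5 and *exp = 4,
   matching frexp's contract arg == frac * 2**exp with 0.5 <= frac < 1.  */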
8937 REAL_VALUE_TYPE frac_rvt = *value;
8938 SET_REAL_EXP (&frac_rvt, 0);
8939 frac = build_real (rettype, frac_rvt);
8940 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8942 break;
8943 default:
8944 gcc_unreachable ();
8947 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8948 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8949 TREE_SIDE_EFFECTS (arg1) = 1;
8950 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8953 return NULL_TREE;
8956 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8957 then we can assume the base is two. If it's false, then we have to
8958 check the mode of the TYPE parameter in certain cases. */
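/* Representative folds:

     ldexp (x, 0)   -> x        (x evaluated once)
     ldexp (0.0, n) -> 0.0      (n kept for side effects)
     ldexp (1.5, 3) -> 12.0 at compile time

   For scalbn/scalbln the compile-time evaluation is only done when
   TYPE's radix is 2.  */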
8960 static tree
8961 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8962 tree type, bool ldexp)
8964 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8966 STRIP_NOPS (arg0);
8967 STRIP_NOPS (arg1);
8969 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8970 if (real_zerop (arg0) || integer_zerop (arg1)
8971 || (TREE_CODE (arg0) == REAL_CST
8972 && !real_isfinite (&TREE_REAL_CST (arg0))))
8973 return omit_one_operand_loc (loc, type, arg0, arg1);
8975 /* If both arguments are constant, then try to evaluate it. */
8976 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8977 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
8978 && host_integerp (arg1, 0))
8980 /* Bound the maximum adjustment to twice the range of the
8981 mode's valid exponents. Use abs to ensure the range is
8982 positive as a sanity check. */
8983 const long max_exp_adj = 2 *
8984 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8985 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8987 /* Get the user-requested adjustment. */
8988 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
8990 /* The requested adjustment must be inside this range. This
8991 is a preliminary cap to avoid things like overflow, we
8992 may still fail to compute the result for other reasons. */
8993 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8995 REAL_VALUE_TYPE initial_result;
8997 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8999 /* Ensure we didn't overflow. */
9000 if (! real_isinf (&initial_result))
9002 const REAL_VALUE_TYPE trunc_result
9003 = real_value_truncate (TYPE_MODE (type), initial_result);
9005 /* Only proceed if the target mode can hold the
9006 resulting value. */
9007 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9008 return build_real (type, trunc_result);
9014 return NULL_TREE;
9017 /* Fold a call to builtin modf. */
9019 static tree
9020 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9022 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9023 return NULL_TREE;
9025 STRIP_NOPS (arg0);
9027 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9028 return NULL_TREE;
9030 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9032 /* Proceed if a valid pointer type was passed in. */
9033 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9035 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9036 REAL_VALUE_TYPE trunc, frac;
9038 switch (value->cl)
9040 case rvc_nan:
9041 case rvc_zero:
9042 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9043 trunc = frac = *value;
9044 break;
9045 case rvc_inf:
9046 /* For +-Inf, return (*arg1 = arg0, +-0). */
9047 frac = dconst0;
9048 frac.sign = value->sign;
9049 trunc = *value;
9050 break;
9051 case rvc_normal:
9052 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9053 real_trunc (&trunc, VOIDmode, value);
9054 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9055 /* If the original number was negative and already
9056 integral, then the fractional part is -0.0. */
9057 if (value->sign && frac.cl == rvc_zero)
9058 frac.sign = value->sign;
9059 break;
9062 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9063 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9064 build_real (rettype, trunc));
9065 TREE_SIDE_EFFECTS (arg1) = 1;
9066 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9067 build_real (rettype, frac));
9070 return NULL_TREE;
9073 /* Given a location LOC, an interclass builtin function decl FNDECL
9074 and its single argument ARG, return a folded expression computing
9075 the same, or NULL_TREE if we either couldn't or didn't want to fold
9076 (the latter happens if there's an RTL instruction available). */
9078 static tree
9079 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9081 enum machine_mode mode;
9083 if (!validate_arg (arg, REAL_TYPE))
9084 return NULL_TREE;
9086 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9087 return NULL_TREE;
9089 mode = TYPE_MODE (TREE_TYPE (arg));
9091 /* If there is no optab, try generic code. */
9092 switch (DECL_FUNCTION_CODE (fndecl))
9094 tree result;
9096 CASE_FLT_FN (BUILT_IN_ISINF):
9098 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9099 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9100 tree const type = TREE_TYPE (arg);
9101 REAL_VALUE_TYPE r;
9102 char buf[128];
9104 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9105 real_from_string (&r, buf);
9106 result = build_call_expr (isgr_fn, 2,
9107 fold_build1_loc (loc, ABS_EXPR, type, arg),
9108 build_real (type, r));
9109 return result;
9111 CASE_FLT_FN (BUILT_IN_FINITE):
9112 case BUILT_IN_ISFINITE:
9114 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9115 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9116 tree const type = TREE_TYPE (arg);
9117 REAL_VALUE_TYPE r;
9118 char buf[128];
9120 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9121 real_from_string (&r, buf);
9122 result = build_call_expr (isle_fn, 2,
9123 fold_build1_loc (loc, ABS_EXPR, type, arg),
9124 build_real (type, r));
9125 /*result = fold_build2_loc (loc, UNGT_EXPR,
9126 TREE_TYPE (TREE_TYPE (fndecl)),
9127 fold_build1_loc (loc, ABS_EXPR, type, arg),
9128 build_real (type, r));
9129 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9130 TREE_TYPE (TREE_TYPE (fndecl)),
9131 result);*/
9132 return result;
9134 case BUILT_IN_ISNORMAL:
9136 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9137 islessequal(fabs(x),DBL_MAX). */
9138 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9139 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9140 tree const type = TREE_TYPE (arg);
9141 REAL_VALUE_TYPE rmax, rmin;
9142 char buf[128];
9144 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9145 real_from_string (&rmax, buf);
9146 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9147 real_from_string (&rmin, buf);
9148 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9149 result = build_call_expr (isle_fn, 2, arg,
9150 build_real (type, rmax));
9151 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9152 build_call_expr (isge_fn, 2, arg,
9153 build_real (type, rmin)));
9154 return result;
9156 default:
9157 break;
9160 return NULL_TREE;
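/* Illustrative sketch of the generic expansions above (assuming a
   double argument x; the fabs/isgreater spellings are only for
   exposition):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)

   where DBL_MAX and DBL_MIN stand for the largest finite and the
   smallest normalized value of the argument's mode.  */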
9163 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
9164 ARG is the argument for the call, and BUILTIN_INDEX identifies the classification. */
9166 static tree
9167 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9169 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9170 REAL_VALUE_TYPE r;
9172 if (!validate_arg (arg, REAL_TYPE))
9173 return NULL_TREE;
9175 switch (builtin_index)
9177 case BUILT_IN_ISINF:
9178 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9179 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9181 if (TREE_CODE (arg) == REAL_CST)
9183 r = TREE_REAL_CST (arg);
9184 if (real_isinf (&r))
9185 return real_compare (GT_EXPR, &r, &dconst0)
9186 ? integer_one_node : integer_minus_one_node;
9187 else
9188 return integer_zero_node;
9191 return NULL_TREE;
9193 case BUILT_IN_ISINF_SIGN:
9195 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9196 /* In a boolean context, GCC will fold the inner COND_EXPR to
9197 1. So e.g. "if (isinf_sign(x))" would be folded to just
9198 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9199 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9200 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9201 tree tmp = NULL_TREE;
9203 arg = builtin_save_expr (arg);
9205 if (signbit_fn && isinf_fn)
9207 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9208 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9210 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9211 signbit_call, integer_zero_node);
9212 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9213 isinf_call, integer_zero_node);
9215 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9216 integer_minus_one_node, integer_one_node);
9217 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9218 isinf_call, tmp,
9219 integer_zero_node);
9222 return tmp;
9225 case BUILT_IN_ISFINITE:
9226 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9227 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9228 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9230 if (TREE_CODE (arg) == REAL_CST)
9232 r = TREE_REAL_CST (arg);
9233 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9236 return NULL_TREE;
9238 case BUILT_IN_ISNAN:
9239 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9240 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9242 if (TREE_CODE (arg) == REAL_CST)
9244 r = TREE_REAL_CST (arg);
9245 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9248 arg = builtin_save_expr (arg);
9249 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9251 default:
9252 gcc_unreachable ();
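/* Illustrative sketch (exposition only): with a variable argument x
   whose mode honors NaNs, the foldings above give

     isnan (x)       ->  x unordered x        (UNORDERED_EXPR)
     isinf_sign (x)  ->  isinf (x) ? (signbit (x) ? -1 : 1) : 0

   while a constant argument is classified at compile time, e.g.
   isnan applied to a NaN REAL_CST folds to 1.  */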
9256 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9257 This builtin will generate code to return the appropriate floating
9258 point classification depending on the value of the floating point
9259 number passed in. The possible return values must be supplied as
9260 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9261 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
9262 one floating point argument which is "type generic". */
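/* A typical use, e.g. from a C library's fpclassify macro, passes the
   classification macros in this order (shown only as an illustration;
   the FP_* names come from <math.h>, not from this file):

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)  */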
9264 static tree
9265 fold_builtin_fpclassify (location_t loc, tree exp)
9267 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9268 arg, type, res, tmp;
9269 enum machine_mode mode;
9270 REAL_VALUE_TYPE r;
9271 char buf[128];
9273 /* Verify the required arguments in the original call. */
9274 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9275 INTEGER_TYPE, INTEGER_TYPE,
9276 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9277 return NULL_TREE;
9279 fp_nan = CALL_EXPR_ARG (exp, 0);
9280 fp_infinite = CALL_EXPR_ARG (exp, 1);
9281 fp_normal = CALL_EXPR_ARG (exp, 2);
9282 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9283 fp_zero = CALL_EXPR_ARG (exp, 4);
9284 arg = CALL_EXPR_ARG (exp, 5);
9285 type = TREE_TYPE (arg);
9286 mode = TYPE_MODE (type);
9287 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9289 /* fpclassify(x) ->
9290 isnan(x) ? FP_NAN :
9291 (fabs(x) == Inf ? FP_INFINITE :
9292 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9293 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9295 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9296 build_real (type, dconst0));
9297 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9298 tmp, fp_zero, fp_subnormal);
9300 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9301 real_from_string (&r, buf);
9302 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9303 arg, build_real (type, r));
9304 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9306 if (HONOR_INFINITIES (mode))
9308 real_inf (&r);
9309 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9310 build_real (type, r));
9311 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9312 fp_infinite, res);
9315 if (HONOR_NANS (mode))
9317 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9318 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9321 return res;
9324 /* Fold a call to an unordered comparison function such as
9325 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9326 being called and ARG0 and ARG1 are the arguments for the call.
9327 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9328 the opposite of the desired result. UNORDERED_CODE is used
9329 for modes that can hold NaNs and ORDERED_CODE is used for
9330 the rest. */
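/* Illustrative sketch of the inversion described above (exposition
   only): for arguments whose mode honors NaNs,

     isgreater (x, y)   ->  !(x unordered-or-less-equal y)
     isunordered (x, y) ->  x unordered y

   whereas without NaNs isgreater (x, y) simply becomes !(x <= y).  */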
9332 static tree
9333 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9334 enum tree_code unordered_code,
9335 enum tree_code ordered_code)
9337 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9338 enum tree_code code;
9339 tree type0, type1;
9340 enum tree_code code0, code1;
9341 tree cmp_type = NULL_TREE;
9343 type0 = TREE_TYPE (arg0);
9344 type1 = TREE_TYPE (arg1);
9346 code0 = TREE_CODE (type0);
9347 code1 = TREE_CODE (type1);
9349 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9350 /* Choose the wider of two real types. */
9351 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9352 ? type0 : type1;
9353 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9354 cmp_type = type0;
9355 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9356 cmp_type = type1;
9358 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9359 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9361 if (unordered_code == UNORDERED_EXPR)
9363 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9364 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9365 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9368 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9369 : ordered_code;
9370 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9371 fold_build2_loc (loc, code, type, arg0, arg1));
9374 /* Fold a call to built-in function FNDECL with 0 arguments.
9375 IGNORE is true if the result of the function call is ignored. This
9376 function returns NULL_TREE if no simplification was possible. */
9378 static tree
9379 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9381 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9382 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9383 switch (fcode)
9385 CASE_FLT_FN (BUILT_IN_INF):
9386 case BUILT_IN_INFD32:
9387 case BUILT_IN_INFD64:
9388 case BUILT_IN_INFD128:
9389 return fold_builtin_inf (loc, type, true);
9391 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9392 return fold_builtin_inf (loc, type, false);
9394 case BUILT_IN_CLASSIFY_TYPE:
9395 return fold_builtin_classify_type (NULL_TREE);
9397 default:
9398 break;
9400 return NULL_TREE;
9403 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9404 IGNORE is true if the result of the function call is ignored. This
9405 function returns NULL_TREE if no simplification was possible. */
9407 static tree
9408 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9410 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9411 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9412 switch (fcode)
9414 case BUILT_IN_CONSTANT_P:
9416 tree val = fold_builtin_constant_p (arg0);
9418 /* Gimplification will pull the CALL_EXPR for the builtin out of
9419 an if condition. When not optimizing, we'll not CSE it back.
9420 To avoid regressions such as link errors, return false now. */
9421 if (!val && !optimize)
9422 val = integer_zero_node;
9424 return val;
9427 case BUILT_IN_CLASSIFY_TYPE:
9428 return fold_builtin_classify_type (arg0);
9430 case BUILT_IN_STRLEN:
9431 return fold_builtin_strlen (loc, type, arg0);
9433 CASE_FLT_FN (BUILT_IN_FABS):
9434 return fold_builtin_fabs (loc, arg0, type);
9436 case BUILT_IN_ABS:
9437 case BUILT_IN_LABS:
9438 case BUILT_IN_LLABS:
9439 case BUILT_IN_IMAXABS:
9440 return fold_builtin_abs (loc, arg0, type);
9442 CASE_FLT_FN (BUILT_IN_CONJ):
9443 if (validate_arg (arg0, COMPLEX_TYPE)
9444 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9445 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9446 break;
9448 CASE_FLT_FN (BUILT_IN_CREAL):
9449 if (validate_arg (arg0, COMPLEX_TYPE)
9450 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9451 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9452 break;
9454 CASE_FLT_FN (BUILT_IN_CIMAG):
9455 if (validate_arg (arg0, COMPLEX_TYPE)
9456 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9457 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9458 break;
9460 CASE_FLT_FN (BUILT_IN_CCOS):
9461 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9463 CASE_FLT_FN (BUILT_IN_CCOSH):
9464 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9466 CASE_FLT_FN (BUILT_IN_CPROJ):
9467 return fold_builtin_cproj(loc, arg0, type);
9469 CASE_FLT_FN (BUILT_IN_CSIN):
9470 if (validate_arg (arg0, COMPLEX_TYPE)
9471 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9472 return do_mpc_arg1 (arg0, type, mpc_sin);
9473 break;
9475 CASE_FLT_FN (BUILT_IN_CSINH):
9476 if (validate_arg (arg0, COMPLEX_TYPE)
9477 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9478 return do_mpc_arg1 (arg0, type, mpc_sinh);
9479 break;
9481 CASE_FLT_FN (BUILT_IN_CTAN):
9482 if (validate_arg (arg0, COMPLEX_TYPE)
9483 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9484 return do_mpc_arg1 (arg0, type, mpc_tan);
9485 break;
9487 CASE_FLT_FN (BUILT_IN_CTANH):
9488 if (validate_arg (arg0, COMPLEX_TYPE)
9489 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9490 return do_mpc_arg1 (arg0, type, mpc_tanh);
9491 break;
9493 CASE_FLT_FN (BUILT_IN_CLOG):
9494 if (validate_arg (arg0, COMPLEX_TYPE)
9495 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9496 return do_mpc_arg1 (arg0, type, mpc_log);
9497 break;
9499 CASE_FLT_FN (BUILT_IN_CSQRT):
9500 if (validate_arg (arg0, COMPLEX_TYPE)
9501 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9502 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9503 break;
9505 CASE_FLT_FN (BUILT_IN_CASIN):
9506 if (validate_arg (arg0, COMPLEX_TYPE)
9507 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9508 return do_mpc_arg1 (arg0, type, mpc_asin);
9509 break;
9511 CASE_FLT_FN (BUILT_IN_CACOS):
9512 if (validate_arg (arg0, COMPLEX_TYPE)
9513 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9514 return do_mpc_arg1 (arg0, type, mpc_acos);
9515 break;
9517 CASE_FLT_FN (BUILT_IN_CATAN):
9518 if (validate_arg (arg0, COMPLEX_TYPE)
9519 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9520 return do_mpc_arg1 (arg0, type, mpc_atan);
9521 break;
9523 CASE_FLT_FN (BUILT_IN_CASINH):
9524 if (validate_arg (arg0, COMPLEX_TYPE)
9525 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9526 return do_mpc_arg1 (arg0, type, mpc_asinh);
9527 break;
9529 CASE_FLT_FN (BUILT_IN_CACOSH):
9530 if (validate_arg (arg0, COMPLEX_TYPE)
9531 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9532 return do_mpc_arg1 (arg0, type, mpc_acosh);
9533 break;
9535 CASE_FLT_FN (BUILT_IN_CATANH):
9536 if (validate_arg (arg0, COMPLEX_TYPE)
9537 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9538 return do_mpc_arg1 (arg0, type, mpc_atanh);
9539 break;
9541 CASE_FLT_FN (BUILT_IN_CABS):
9542 return fold_builtin_cabs (loc, arg0, type, fndecl);
9544 CASE_FLT_FN (BUILT_IN_CARG):
9545 return fold_builtin_carg (loc, arg0, type);
9547 CASE_FLT_FN (BUILT_IN_SQRT):
9548 return fold_builtin_sqrt (loc, arg0, type);
9550 CASE_FLT_FN (BUILT_IN_CBRT):
9551 return fold_builtin_cbrt (loc, arg0, type);
9553 CASE_FLT_FN (BUILT_IN_ASIN):
9554 if (validate_arg (arg0, REAL_TYPE))
9555 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9556 &dconstm1, &dconst1, true);
9557 break;
9559 CASE_FLT_FN (BUILT_IN_ACOS):
9560 if (validate_arg (arg0, REAL_TYPE))
9561 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9562 &dconstm1, &dconst1, true);
9563 break;
9565 CASE_FLT_FN (BUILT_IN_ATAN):
9566 if (validate_arg (arg0, REAL_TYPE))
9567 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9568 break;
9570 CASE_FLT_FN (BUILT_IN_ASINH):
9571 if (validate_arg (arg0, REAL_TYPE))
9572 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9573 break;
9575 CASE_FLT_FN (BUILT_IN_ACOSH):
9576 if (validate_arg (arg0, REAL_TYPE))
9577 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9578 &dconst1, NULL, true);
9579 break;
9581 CASE_FLT_FN (BUILT_IN_ATANH):
9582 if (validate_arg (arg0, REAL_TYPE))
9583 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9584 &dconstm1, &dconst1, false);
9585 break;
9587 CASE_FLT_FN (BUILT_IN_SIN):
9588 if (validate_arg (arg0, REAL_TYPE))
9589 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9590 break;
9592 CASE_FLT_FN (BUILT_IN_COS):
9593 return fold_builtin_cos (loc, arg0, type, fndecl);
9595 CASE_FLT_FN (BUILT_IN_TAN):
9596 return fold_builtin_tan (arg0, type);
9598 CASE_FLT_FN (BUILT_IN_CEXP):
9599 return fold_builtin_cexp (loc, arg0, type);
9601 CASE_FLT_FN (BUILT_IN_CEXPI):
9602 if (validate_arg (arg0, REAL_TYPE))
9603 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9604 break;
9606 CASE_FLT_FN (BUILT_IN_SINH):
9607 if (validate_arg (arg0, REAL_TYPE))
9608 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9609 break;
9611 CASE_FLT_FN (BUILT_IN_COSH):
9612 return fold_builtin_cosh (loc, arg0, type, fndecl);
9614 CASE_FLT_FN (BUILT_IN_TANH):
9615 if (validate_arg (arg0, REAL_TYPE))
9616 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9617 break;
9619 CASE_FLT_FN (BUILT_IN_ERF):
9620 if (validate_arg (arg0, REAL_TYPE))
9621 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9622 break;
9624 CASE_FLT_FN (BUILT_IN_ERFC):
9625 if (validate_arg (arg0, REAL_TYPE))
9626 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9627 break;
9629 CASE_FLT_FN (BUILT_IN_TGAMMA):
9630 if (validate_arg (arg0, REAL_TYPE))
9631 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9632 break;
9634 CASE_FLT_FN (BUILT_IN_EXP):
9635 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9637 CASE_FLT_FN (BUILT_IN_EXP2):
9638 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9640 CASE_FLT_FN (BUILT_IN_EXP10):
9641 CASE_FLT_FN (BUILT_IN_POW10):
9642 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9644 CASE_FLT_FN (BUILT_IN_EXPM1):
9645 if (validate_arg (arg0, REAL_TYPE))
9646 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9647 break;
9649 CASE_FLT_FN (BUILT_IN_LOG):
9650 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9652 CASE_FLT_FN (BUILT_IN_LOG2):
9653 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9655 CASE_FLT_FN (BUILT_IN_LOG10):
9656 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9658 CASE_FLT_FN (BUILT_IN_LOG1P):
9659 if (validate_arg (arg0, REAL_TYPE))
9660 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9661 &dconstm1, NULL, false);
9662 break;
9664 CASE_FLT_FN (BUILT_IN_J0):
9665 if (validate_arg (arg0, REAL_TYPE))
9666 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9667 NULL, NULL, 0);
9668 break;
9670 CASE_FLT_FN (BUILT_IN_J1):
9671 if (validate_arg (arg0, REAL_TYPE))
9672 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9673 NULL, NULL, 0);
9674 break;
9676 CASE_FLT_FN (BUILT_IN_Y0):
9677 if (validate_arg (arg0, REAL_TYPE))
9678 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9679 &dconst0, NULL, false);
9680 break;
9682 CASE_FLT_FN (BUILT_IN_Y1):
9683 if (validate_arg (arg0, REAL_TYPE))
9684 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9685 &dconst0, NULL, false);
9686 break;
9688 CASE_FLT_FN (BUILT_IN_NAN):
9689 case BUILT_IN_NAND32:
9690 case BUILT_IN_NAND64:
9691 case BUILT_IN_NAND128:
9692 return fold_builtin_nan (arg0, type, true);
9694 CASE_FLT_FN (BUILT_IN_NANS):
9695 return fold_builtin_nan (arg0, type, false);
9697 CASE_FLT_FN (BUILT_IN_FLOOR):
9698 return fold_builtin_floor (loc, fndecl, arg0);
9700 CASE_FLT_FN (BUILT_IN_CEIL):
9701 return fold_builtin_ceil (loc, fndecl, arg0);
9703 CASE_FLT_FN (BUILT_IN_TRUNC):
9704 return fold_builtin_trunc (loc, fndecl, arg0);
9706 CASE_FLT_FN (BUILT_IN_ROUND):
9707 return fold_builtin_round (loc, fndecl, arg0);
9709 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9710 CASE_FLT_FN (BUILT_IN_RINT):
9711 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9713 CASE_FLT_FN (BUILT_IN_LCEIL):
9714 CASE_FLT_FN (BUILT_IN_LLCEIL):
9715 CASE_FLT_FN (BUILT_IN_LFLOOR):
9716 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9717 CASE_FLT_FN (BUILT_IN_LROUND):
9718 CASE_FLT_FN (BUILT_IN_LLROUND):
9719 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9721 CASE_FLT_FN (BUILT_IN_LRINT):
9722 CASE_FLT_FN (BUILT_IN_LLRINT):
9723 return fold_fixed_mathfn (loc, fndecl, arg0);
9725 case BUILT_IN_BSWAP32:
9726 case BUILT_IN_BSWAP64:
9727 return fold_builtin_bswap (fndecl, arg0);
9729 CASE_INT_FN (BUILT_IN_FFS):
9730 CASE_INT_FN (BUILT_IN_CLZ):
9731 CASE_INT_FN (BUILT_IN_CTZ):
9732 CASE_INT_FN (BUILT_IN_POPCOUNT):
9733 CASE_INT_FN (BUILT_IN_PARITY):
9734 return fold_builtin_bitop (fndecl, arg0);
9736 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9737 return fold_builtin_signbit (loc, arg0, type);
9739 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9740 return fold_builtin_significand (loc, arg0, type);
9742 CASE_FLT_FN (BUILT_IN_ILOGB):
9743 CASE_FLT_FN (BUILT_IN_LOGB):
9744 return fold_builtin_logb (loc, arg0, type);
9746 case BUILT_IN_ISASCII:
9747 return fold_builtin_isascii (loc, arg0);
9749 case BUILT_IN_TOASCII:
9750 return fold_builtin_toascii (loc, arg0);
9752 case BUILT_IN_ISDIGIT:
9753 return fold_builtin_isdigit (loc, arg0);
9755 CASE_FLT_FN (BUILT_IN_FINITE):
9756 case BUILT_IN_FINITED32:
9757 case BUILT_IN_FINITED64:
9758 case BUILT_IN_FINITED128:
9759 case BUILT_IN_ISFINITE:
9761 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9762 if (ret)
9763 return ret;
9764 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9767 CASE_FLT_FN (BUILT_IN_ISINF):
9768 case BUILT_IN_ISINFD32:
9769 case BUILT_IN_ISINFD64:
9770 case BUILT_IN_ISINFD128:
9772 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9773 if (ret)
9774 return ret;
9775 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9778 case BUILT_IN_ISNORMAL:
9779 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9781 case BUILT_IN_ISINF_SIGN:
9782 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9784 CASE_FLT_FN (BUILT_IN_ISNAN):
9785 case BUILT_IN_ISNAND32:
9786 case BUILT_IN_ISNAND64:
9787 case BUILT_IN_ISNAND128:
9788 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9790 case BUILT_IN_PRINTF:
9791 case BUILT_IN_PRINTF_UNLOCKED:
9792 case BUILT_IN_VPRINTF:
9793 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9795 case BUILT_IN_FREE:
9796 if (integer_zerop (arg0))
9797 return build_empty_stmt (loc);
9798 break;
9800 default:
9801 break;
9804 return NULL_TREE;
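/* Illustrative note (exposition only): most of the math cases above
   fold calls with constant arguments at compile time via MPFR/MPC,
   e.g. a call such as sin (1.0) can be replaced by its numeric value,
   provided the exactness and math-flag checks done in do_mpfr_arg1
   and friends allow it.  */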
9808 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9809 IGNORE is true if the result of the function call is ignored. This
9810 function returns NULL_TREE if no simplification was possible. */
9812 static tree
9813 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9815 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9816 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9818 switch (fcode)
9820 CASE_FLT_FN (BUILT_IN_JN):
9821 if (validate_arg (arg0, INTEGER_TYPE)
9822 && validate_arg (arg1, REAL_TYPE))
9823 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9824 break;
9826 CASE_FLT_FN (BUILT_IN_YN):
9827 if (validate_arg (arg0, INTEGER_TYPE)
9828 && validate_arg (arg1, REAL_TYPE))
9829 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9830 &dconst0, false);
9831 break;
9833 CASE_FLT_FN (BUILT_IN_DREM):
9834 CASE_FLT_FN (BUILT_IN_REMAINDER):
9835 if (validate_arg (arg0, REAL_TYPE)
9836 && validate_arg(arg1, REAL_TYPE))
9837 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9838 break;
9840 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9841 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9842 if (validate_arg (arg0, REAL_TYPE)
9843 && validate_arg(arg1, POINTER_TYPE))
9844 return do_mpfr_lgamma_r (arg0, arg1, type);
9845 break;
9847 CASE_FLT_FN (BUILT_IN_ATAN2):
9848 if (validate_arg (arg0, REAL_TYPE)
9849 && validate_arg(arg1, REAL_TYPE))
9850 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9851 break;
9853 CASE_FLT_FN (BUILT_IN_FDIM):
9854 if (validate_arg (arg0, REAL_TYPE)
9855 && validate_arg(arg1, REAL_TYPE))
9856 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9857 break;
9859 CASE_FLT_FN (BUILT_IN_HYPOT):
9860 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9862 CASE_FLT_FN (BUILT_IN_CPOW):
9863 if (validate_arg (arg0, COMPLEX_TYPE)
9864 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9865 && validate_arg (arg1, COMPLEX_TYPE)
9866 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9867 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9868 break;
9870 CASE_FLT_FN (BUILT_IN_LDEXP):
9871 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9872 CASE_FLT_FN (BUILT_IN_SCALBN):
9873 CASE_FLT_FN (BUILT_IN_SCALBLN):
9874 return fold_builtin_load_exponent (loc, arg0, arg1,
9875 type, /*ldexp=*/false);
9877 CASE_FLT_FN (BUILT_IN_FREXP):
9878 return fold_builtin_frexp (loc, arg0, arg1, type);
9880 CASE_FLT_FN (BUILT_IN_MODF):
9881 return fold_builtin_modf (loc, arg0, arg1, type);
9883 case BUILT_IN_BZERO:
9884 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9886 case BUILT_IN_FPUTS:
9887 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9889 case BUILT_IN_FPUTS_UNLOCKED:
9890 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9892 case BUILT_IN_STRSTR:
9893 return fold_builtin_strstr (loc, arg0, arg1, type);
9895 case BUILT_IN_STRCAT:
9896 return fold_builtin_strcat (loc, arg0, arg1);
9898 case BUILT_IN_STRSPN:
9899 return fold_builtin_strspn (loc, arg0, arg1);
9901 case BUILT_IN_STRCSPN:
9902 return fold_builtin_strcspn (loc, arg0, arg1);
9904 case BUILT_IN_STRCHR:
9905 case BUILT_IN_INDEX:
9906 return fold_builtin_strchr (loc, arg0, arg1, type);
9908 case BUILT_IN_STRRCHR:
9909 case BUILT_IN_RINDEX:
9910 return fold_builtin_strrchr (loc, arg0, arg1, type);
9912 case BUILT_IN_STRCPY:
9913 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
9915 case BUILT_IN_STPCPY:
9916 if (ignore)
9918 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
9919 if (!fn)
9920 break;
9922 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
9924 else
9925 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
9926 break;
9928 case BUILT_IN_STRCMP:
9929 return fold_builtin_strcmp (loc, arg0, arg1);
9931 case BUILT_IN_STRPBRK:
9932 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9934 case BUILT_IN_EXPECT:
9935 return fold_builtin_expect (loc, arg0, arg1);
9937 CASE_FLT_FN (BUILT_IN_POW):
9938 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
9940 CASE_FLT_FN (BUILT_IN_POWI):
9941 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
9943 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9944 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
9946 CASE_FLT_FN (BUILT_IN_FMIN):
9947 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
9949 CASE_FLT_FN (BUILT_IN_FMAX):
9950 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
9952 case BUILT_IN_ISGREATER:
9953 return fold_builtin_unordered_cmp (loc, fndecl,
9954 arg0, arg1, UNLE_EXPR, LE_EXPR);
9955 case BUILT_IN_ISGREATEREQUAL:
9956 return fold_builtin_unordered_cmp (loc, fndecl,
9957 arg0, arg1, UNLT_EXPR, LT_EXPR);
9958 case BUILT_IN_ISLESS:
9959 return fold_builtin_unordered_cmp (loc, fndecl,
9960 arg0, arg1, UNGE_EXPR, GE_EXPR);
9961 case BUILT_IN_ISLESSEQUAL:
9962 return fold_builtin_unordered_cmp (loc, fndecl,
9963 arg0, arg1, UNGT_EXPR, GT_EXPR);
9964 case BUILT_IN_ISLESSGREATER:
9965 return fold_builtin_unordered_cmp (loc, fndecl,
9966 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9967 case BUILT_IN_ISUNORDERED:
9968 return fold_builtin_unordered_cmp (loc, fndecl,
9969 arg0, arg1, UNORDERED_EXPR,
9970 NOP_EXPR);
9972 /* We do the folding for va_start in the expander. */
9973 case BUILT_IN_VA_START:
9974 break;
9976 case BUILT_IN_SPRINTF:
9977 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
9979 case BUILT_IN_OBJECT_SIZE:
9980 return fold_builtin_object_size (arg0, arg1);
9982 case BUILT_IN_PRINTF:
9983 case BUILT_IN_PRINTF_UNLOCKED:
9984 case BUILT_IN_VPRINTF:
9985 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
9987 case BUILT_IN_PRINTF_CHK:
9988 case BUILT_IN_VPRINTF_CHK:
9989 if (!validate_arg (arg0, INTEGER_TYPE)
9990 || TREE_SIDE_EFFECTS (arg0))
9991 return NULL_TREE;
9992 else
9993 return fold_builtin_printf (loc, fndecl,
9994 arg1, NULL_TREE, ignore, fcode);
9995 break;
9997 case BUILT_IN_FPRINTF:
9998 case BUILT_IN_FPRINTF_UNLOCKED:
9999 case BUILT_IN_VFPRINTF:
10000 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10001 ignore, fcode);
10003 default:
10004 break;
10006 return NULL_TREE;
10009 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10010 and ARG2. IGNORE is true if the result of the function call is ignored.
10011 This function returns NULL_TREE if no simplification was possible. */
10013 static tree
10014 fold_builtin_3 (location_t loc, tree fndecl,
10015 tree arg0, tree arg1, tree arg2, bool ignore)
10017 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10018 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10019 switch (fcode)
10022 CASE_FLT_FN (BUILT_IN_SINCOS):
10023 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10025 CASE_FLT_FN (BUILT_IN_FMA):
10026 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10027 break;
10029 CASE_FLT_FN (BUILT_IN_REMQUO):
10030 if (validate_arg (arg0, REAL_TYPE)
10031 && validate_arg(arg1, REAL_TYPE)
10032 && validate_arg(arg2, POINTER_TYPE))
10033 return do_mpfr_remquo (arg0, arg1, arg2);
10034 break;
10036 case BUILT_IN_MEMSET:
10037 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10039 case BUILT_IN_BCOPY:
10040 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10041 void_type_node, true, /*endp=*/3);
10043 case BUILT_IN_MEMCPY:
10044 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10045 type, ignore, /*endp=*/0);
10047 case BUILT_IN_MEMPCPY:
10048 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10049 type, ignore, /*endp=*/1);
10051 case BUILT_IN_MEMMOVE:
10052 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10053 type, ignore, /*endp=*/3);
10055 case BUILT_IN_STRNCAT:
10056 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10058 case BUILT_IN_STRNCPY:
10059 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10061 case BUILT_IN_STRNCMP:
10062 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10064 case BUILT_IN_MEMCHR:
10065 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10067 case BUILT_IN_BCMP:
10068 case BUILT_IN_MEMCMP:
10069 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10071 case BUILT_IN_SPRINTF:
10072 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10074 case BUILT_IN_SNPRINTF:
10075 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10077 case BUILT_IN_STRCPY_CHK:
10078 case BUILT_IN_STPCPY_CHK:
10079 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10080 ignore, fcode);
10082 case BUILT_IN_STRCAT_CHK:
10083 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10085 case BUILT_IN_PRINTF_CHK:
10086 case BUILT_IN_VPRINTF_CHK:
10087 if (!validate_arg (arg0, INTEGER_TYPE)
10088 || TREE_SIDE_EFFECTS (arg0))
10089 return NULL_TREE;
10090 else
10091 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10092 break;
10094 case BUILT_IN_FPRINTF:
10095 case BUILT_IN_FPRINTF_UNLOCKED:
10096 case BUILT_IN_VFPRINTF:
10097 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10098 ignore, fcode);
10100 case BUILT_IN_FPRINTF_CHK:
10101 case BUILT_IN_VFPRINTF_CHK:
10102 if (!validate_arg (arg1, INTEGER_TYPE)
10103 || TREE_SIDE_EFFECTS (arg1))
10104 return NULL_TREE;
10105 else
10106 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10107 ignore, fcode);
10109 default:
10110 break;
10112 return NULL_TREE;
10115 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10116 ARG2, and ARG3. IGNORE is true if the result of the function call is
10117 ignored. This function returns NULL_TREE if no simplification was
10118 possible. */
10120 static tree
10121 fold_builtin_4 (location_t loc, tree fndecl,
10122 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10124 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10126 switch (fcode)
10128 case BUILT_IN_MEMCPY_CHK:
10129 case BUILT_IN_MEMPCPY_CHK:
10130 case BUILT_IN_MEMMOVE_CHK:
10131 case BUILT_IN_MEMSET_CHK:
10132 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10133 NULL_TREE, ignore,
10134 DECL_FUNCTION_CODE (fndecl));
10136 case BUILT_IN_STRNCPY_CHK:
10137 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10139 case BUILT_IN_STRNCAT_CHK:
10140 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10142 case BUILT_IN_SNPRINTF:
10143 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10145 case BUILT_IN_FPRINTF_CHK:
10146 case BUILT_IN_VFPRINTF_CHK:
10147 if (!validate_arg (arg1, INTEGER_TYPE)
10148 || TREE_SIDE_EFFECTS (arg1))
10149 return NULL_TREE;
10150 else
10151 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10152 ignore, fcode);
10153 break;
10155 default:
10156 break;
10158 return NULL_TREE;
10161 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10162 arguments, where NARGS <= 4. IGNORE is true if the result of the
10163 function call is ignored. This function returns NULL_TREE if no
10164 simplification was possible. Note that this only folds builtins with
10165 fixed argument patterns. Foldings that do varargs-to-varargs
10166 transformations, or that match calls with more than 4 arguments,
10167 need to be handled with fold_builtin_varargs instead. */
10169 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10171 static tree
10172 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10174 tree ret = NULL_TREE;
10176 switch (nargs)
10178 case 0:
10179 ret = fold_builtin_0 (loc, fndecl, ignore);
10180 break;
10181 case 1:
10182 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10183 break;
10184 case 2:
10185 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10186 break;
10187 case 3:
10188 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10189 break;
10190 case 4:
10191 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10192 ignore);
10193 break;
10194 default:
10195 break;
10197 if (ret)
10199 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10200 SET_EXPR_LOCATION (ret, loc);
10201 TREE_NO_WARNING (ret) = 1;
10202 return ret;
10204 return NULL_TREE;
10207 /* Builtins with folding operations that operate on "..." arguments
10208 need special handling; we need to store the arguments in a convenient
10209 data structure before attempting any folding. Fortunately there are
10210 only a few builtins that fall into this category. FNDECL is the
10211 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10212 result of the function call is ignored. */
10214 static tree
10215 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10216 bool ignore ATTRIBUTE_UNUSED)
10218 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10219 tree ret = NULL_TREE;
10221 switch (fcode)
10223 case BUILT_IN_SPRINTF_CHK:
10224 case BUILT_IN_VSPRINTF_CHK:
10225 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10226 break;
10228 case BUILT_IN_SNPRINTF_CHK:
10229 case BUILT_IN_VSNPRINTF_CHK:
10230 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10231 break;
10233 case BUILT_IN_FPCLASSIFY:
10234 ret = fold_builtin_fpclassify (loc, exp);
10235 break;
10237 default:
10238 break;
10240 if (ret)
10242 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10243 SET_EXPR_LOCATION (ret, loc);
10244 TREE_NO_WARNING (ret) = 1;
10245 return ret;
10247 return NULL_TREE;
10250 /* Return true if FNDECL shouldn't be folded right now.
10251 If a built-in function has an inline attribute always_inline
10252 wrapper, defer folding it until after always_inline functions have
10253 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10254 might not be performed. */
10256 static bool
10257 avoid_folding_inline_builtin (tree fndecl)
10259 return (DECL_DECLARED_INLINE_P (fndecl)
10260 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10261 && cfun
10262 && !cfun->always_inline_functions_inlined
10263 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10266 /* A wrapper function for builtin folding that prevents warnings for
10267 "statement without effect" and the like, caused by removing the
10268 call node earlier than the warning is generated. */
10270 tree
10271 fold_call_expr (location_t loc, tree exp, bool ignore)
10273 tree ret = NULL_TREE;
10274 tree fndecl = get_callee_fndecl (exp);
10275 if (fndecl
10276 && TREE_CODE (fndecl) == FUNCTION_DECL
10277 && DECL_BUILT_IN (fndecl)
10278 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10279 yet. Defer folding until we see all the arguments
10280 (after inlining). */
10281 && !CALL_EXPR_VA_ARG_PACK (exp))
10283 int nargs = call_expr_nargs (exp);
10285 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10286 instead the last argument is __builtin_va_arg_pack (). Defer folding
10287 even in that case, until arguments are finalized. */
10288 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10290 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10291 if (fndecl2
10292 && TREE_CODE (fndecl2) == FUNCTION_DECL
10293 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10294 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10295 return NULL_TREE;
10298 if (avoid_folding_inline_builtin (fndecl))
10299 return NULL_TREE;
10301 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10302 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10303 CALL_EXPR_ARGP (exp), ignore);
10304 else
10306 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10308 tree *args = CALL_EXPR_ARGP (exp);
10309 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10311 if (!ret)
10312 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10313 if (ret)
10314 return ret;
10317 return NULL_TREE;
10320 /* Conveniently construct a function call expression. FNDECL names the
10321 function to be called and N arguments are passed in the array
10322 ARGARRAY. */
10324 tree
10325 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10327 tree fntype = TREE_TYPE (fndecl);
10328 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10330 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10333 /* Conveniently construct a function call expression. FNDECL names the
10334 function to be called and the arguments are passed in the vector
10335 VEC. */
10337 tree
10338 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10340 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10341 VEC_address (tree, vec));
10345 /* Conveniently construct a function call expression. FNDECL names the
10346 function to be called, N is the number of arguments, and the "..."
10347 parameters are the argument expressions. */
10349 tree
10350 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10352 va_list ap;
10353 tree *argarray = XALLOCAVEC (tree, n);
10354 int i;
10356 va_start (ap, n);
10357 for (i = 0; i < n; i++)
10358 argarray[i] = va_arg (ap, tree);
10359 va_end (ap);
10360 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10363 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10364 varargs macros aren't supported by all bootstrap compilers. */
10366 tree
10367 build_call_expr (tree fndecl, int n, ...)
10369 va_list ap;
10370 tree *argarray = XALLOCAVEC (tree, n);
10371 int i;
10373 va_start (ap, n);
10374 for (i = 0; i < n; i++)
10375 argarray[i] = va_arg (ap, tree);
10376 va_end (ap);
10377 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
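/* Usage sketch: the folders earlier in this file call these helpers
   directly, e.g. (as in fold_builtin_interclass_mathfn above)

     build_call_expr (isgr_fn, 2,
                      fold_build1_loc (loc, ABS_EXPR, type, arg),
                      build_real (type, r));

   which builds a two-argument CALL_EXPR to the isgreater decl.  */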
10380 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10381 N arguments are passed in the array ARGARRAY. */
10383 tree
10384 fold_builtin_call_array (location_t loc, tree type,
10385 tree fn,
10386 int n,
10387 tree *argarray)
10389 tree ret = NULL_TREE;
10390 tree exp;
10392 if (TREE_CODE (fn) == ADDR_EXPR)
10394 tree fndecl = TREE_OPERAND (fn, 0);
10395 if (TREE_CODE (fndecl) == FUNCTION_DECL
10396 && DECL_BUILT_IN (fndecl))
10398 /* If the last argument is __builtin_va_arg_pack (), arguments to this
10399 function are not finalized yet. Defer folding until they are. */
10400 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10402 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10403 if (fndecl2
10404 && TREE_CODE (fndecl2) == FUNCTION_DECL
10405 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10406 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10407 return build_call_array_loc (loc, type, fn, n, argarray);
10409 if (avoid_folding_inline_builtin (fndecl))
10410 return build_call_array_loc (loc, type, fn, n, argarray);
10411 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10413 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10414 if (ret)
10415 return ret;
10417 return build_call_array_loc (loc, type, fn, n, argarray);
10419 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10421 /* First try the transformations that don't require consing up
10422 an exp. */
10423 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10424 if (ret)
10425 return ret;
10428 /* If we got this far, we need to build an exp. */
10429 exp = build_call_array_loc (loc, type, fn, n, argarray);
10430 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10431 return ret ? ret : exp;
10435 return build_call_array_loc (loc, type, fn, n, argarray);
10438 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10439 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10440 of arguments in ARGS to be omitted. OLDNARGS is the number of
10441 elements in ARGS. */
10443 static tree
10444 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10445 int skip, tree fndecl, int n, va_list newargs)
10447 int nargs = oldnargs - skip + n;
10448 tree *buffer;
10450 if (n > 0)
10452 int i, j;
10454 buffer = XALLOCAVEC (tree, nargs);
10455 for (i = 0; i < n; i++)
10456 buffer[i] = va_arg (newargs, tree);
10457 for (j = skip; j < oldnargs; j++, i++)
10458 buffer[i] = args[j];
10460 else
10461 buffer = args + skip;
10463 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10466 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10467 list ARGS along with N new arguments specified as the "..."
10468 parameters. SKIP is the number of arguments in ARGS to be omitted.
10469 OLDNARGS is the number of elements in ARGS. */
10471 static tree
10472 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10473 int skip, tree fndecl, int n, ...)
10475 va_list ap;
10476 tree t;
10478 va_start (ap, n);
10479 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10480 va_end (ap);
10482 return t;
10485 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10486 along with N new arguments specified as the "..." parameters. SKIP
10487 is the number of arguments in EXP to be omitted. This function is used
10488 to do varargs-to-varargs transformations. */
10490 static tree
10491 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10493 va_list ap;
10494 tree t;
10496 va_start (ap, n);
10497 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10498 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10499 va_end (ap);
10501 return t;
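/* Usage sketch (illustrative only; the actual callers live elsewhere
   in this file): the checking-builtin foldings use these helpers to
   drop arguments that are no longer needed and redirect the call,
   conceptually turning something like

     __sprintf_chk (dest, flag, objsize, fmt, ...)

   into

     sprintf (dest, fmt, ...)

   by skipping the leading arguments and rebuilding the call to the
   plain function, keeping any remaining variable arguments.  */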
10504 /* Validate a single argument ARG against a tree code CODE representing
10505 a type. */
10507 static bool
10508 validate_arg (const_tree arg, enum tree_code code)
10510 if (!arg)
10511 return false;
10512 else if (code == POINTER_TYPE)
10513 return POINTER_TYPE_P (TREE_TYPE (arg));
10514 else if (code == INTEGER_TYPE)
10515 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10516 return code == TREE_CODE (TREE_TYPE (arg));
10519 /* This function validates the types of a function call argument list
10520 against a specified list of tree_codes. If the last specifier is a 0,
10521 that represents an ellipsis, otherwise the last specifier must be a
10522 VOID_TYPE.
10524 This is the GIMPLE version of validate_arglist. Eventually we want to
10525 completely convert builtins.c to work from GIMPLEs and the tree based
10526 validate_arglist will then be removed. */
10528 bool
10529 validate_gimple_arglist (const_gimple call, ...)
10531 enum tree_code code;
10532 bool res = 0;
10533 va_list ap;
10534 const_tree arg;
10535 size_t i;
10537 va_start (ap, call);
10538 i = 0;
10542 code = (enum tree_code) va_arg (ap, int);
10543 switch (code)
10545 case 0:
10546 /* This signifies an ellipsis; any further arguments are all ok. */
10547 res = true;
10548 goto end;
10549 case VOID_TYPE:
10550 /* This signifies an endlink; if no arguments remain, return
10551 true, otherwise return false. */
10552 res = (i == gimple_call_num_args (call));
10553 goto end;
10554 default:
10555 /* If no parameters remain or the parameter's code does not
10556 match the specified code, return false. Otherwise continue
10557 checking any remaining arguments. */
10558 arg = gimple_call_arg (call, i++);
10559 if (!validate_arg (arg, code))
10560 goto end;
10561 break;
10564 while (1);
10566 /* We need gotos here since we can only have one VA_CLOSE in a
10567 function. */
10568 end: ;
10569 va_end (ap);
10571 return res;
10574 /* This function validates the types of a function call argument list
10575 against a specified list of tree_codes. If the last specifier is a 0,
10576 that represents an ellipsis, otherwise the last specifier must be a
10577 VOID_TYPE. */
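/* For instance, the fpclassify folder earlier in this file checks its
   fixed arguments and the single "type generic" argument with

     validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                       INTEGER_TYPE, INTEGER_TYPE, REAL_TYPE, VOID_TYPE)

   (example lifted from fold_builtin_fpclassify above).  */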
10579 bool
10580 validate_arglist (const_tree callexpr, ...)
10582 enum tree_code code;
10583 bool res = 0;
10584 va_list ap;
10585 const_call_expr_arg_iterator iter;
10586 const_tree arg;
10588 va_start (ap, callexpr);
10589 init_const_call_expr_arg_iterator (callexpr, &iter);
10593 code = (enum tree_code) va_arg (ap, int);
10594 switch (code)
10596 case 0:
10597 /* This signifies an ellipsis; any further arguments are all ok. */
10598 res = true;
10599 goto end;
10600 case VOID_TYPE:
10601 /* This signifies an endlink; if no arguments remain, return
10602 true, otherwise return false. */
10603 res = !more_const_call_expr_args_p (&iter);
10604 goto end;
10605 default:
10606 /* If no parameters remain or the parameter's code does not
10607 match the specified code, return false. Otherwise continue
10608 checking any remaining arguments. */
10609 arg = next_const_call_expr_arg (&iter);
10610 if (!validate_arg (arg, code))
10611 goto end;
10612 break;
10615 while (1);
10617 /* We need gotos here since we can only have one VA_CLOSE in a
10618 function. */
10619 end: ;
10620 va_end (ap);
10622 return res;
10625 /* Default target-specific builtin expander that does nothing. */
10627 rtx
10628 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10629 rtx target ATTRIBUTE_UNUSED,
10630 rtx subtarget ATTRIBUTE_UNUSED,
10631 enum machine_mode mode ATTRIBUTE_UNUSED,
10632 int ignore ATTRIBUTE_UNUSED)
10634 return NULL_RTX;
10637 /* Returns true if EXP represents data that would potentially reside
10638 in a readonly section. */
10640 static bool
10641 readonly_data_expr (tree exp)
10643 STRIP_NOPS (exp);
10645 if (TREE_CODE (exp) != ADDR_EXPR)
10646 return false;
10648 exp = get_base_address (TREE_OPERAND (exp, 0));
10649 if (!exp)
10650 return false;
10652 /* Make sure we call decl_readonly_section only for trees it
10653 can handle (since it returns true for everything it doesn't
10654 understand). */
10655 if (TREE_CODE (exp) == STRING_CST
10656 || TREE_CODE (exp) == CONSTRUCTOR
10657 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10658 return decl_readonly_section (exp, 0);
10659 else
10660 return false;
10663 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10664 to the call, and TYPE is its return type.
10666 Return NULL_TREE if no simplification was possible, otherwise return the
10667 simplified form of the call as a tree.
10669 The simplified form may be a constant or other expression which
10670 computes the same value, but in a more efficient manner (including
10671 calls to other builtin functions).
10673 The call may contain arguments which need to be evaluated, but
10674 which are not useful to determine the result of the call. In
10675 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10676 COMPOUND_EXPR will be an argument which must be evaluated.
10677 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10678 COMPOUND_EXPR in the chain will contain the tree for the simplified
10679 form of the builtin function call. */
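/* For example (constants shown only as an illustration):

     strstr ("hello", "ell")  folds to  "hello" + 1
     strstr (s, "")           folds to  (char *) s
     strstr (s, "x")          becomes   strchr (s, 'x')

   matching the constant, empty-needle and single-character cases
   handled below.  */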
10681 static tree
10682 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10684 if (!validate_arg (s1, POINTER_TYPE)
10685 || !validate_arg (s2, POINTER_TYPE))
10686 return NULL_TREE;
10687 else
10689 tree fn;
10690 const char *p1, *p2;
10692 p2 = c_getstr (s2);
10693 if (p2 == NULL)
10694 return NULL_TREE;
10696 p1 = c_getstr (s1);
10697 if (p1 != NULL)
10699 const char *r = strstr (p1, p2);
10700 tree tem;
10702 if (r == NULL)
10703 return build_int_cst (TREE_TYPE (s1), 0);
10705 /* Return an offset into the constant string argument. */
10706 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10707 s1, size_int (r - p1));
10708 return fold_convert_loc (loc, type, tem);
10711 /* The argument is const char *, and the result is char *, so we need
10712 a type conversion here to avoid a warning. */
10713 if (p2[0] == '\0')
10714 return fold_convert_loc (loc, type, s1);
10716 if (p2[1] != '\0')
10717 return NULL_TREE;
10719 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10720 if (!fn)
10721 return NULL_TREE;
10723 /* New argument list transforming strstr(s1, s2) to
10724 strchr(s1, s2[0]). */
10725 return build_call_expr_loc (loc, fn, 2, s1,
10726 build_int_cst (integer_type_node, p2[0]));
10730 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10731 the call, and TYPE is its return type.
10733 Return NULL_TREE if no simplification was possible, otherwise return the
10734 simplified form of the call as a tree.
10736 The simplified form may be a constant or other expression which
10737 computes the same value, but in a more efficient manner (including
10738 calls to other builtin functions).
10740 The call may contain arguments which need to be evaluated, but
10741 which are not useful to determine the result of the call. In
10742 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10743 COMPOUND_EXPR will be an argument which must be evaluated.
10744 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10745 COMPOUND_EXPR in the chain will contain the tree for the simplified
10746 form of the builtin function call. */
10748 static tree
10749 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10751 if (!validate_arg (s1, POINTER_TYPE)
10752 || !validate_arg (s2, INTEGER_TYPE))
10753 return NULL_TREE;
10754 else
10756 const char *p1;
10758 if (TREE_CODE (s2) != INTEGER_CST)
10759 return NULL_TREE;
10761 p1 = c_getstr (s1);
10762 if (p1 != NULL)
10764 char c;
10765 const char *r;
10766 tree tem;
10768 if (target_char_cast (s2, &c))
10769 return NULL_TREE;
10771 r = strchr (p1, c);
10773 if (r == NULL)
10774 return build_int_cst (TREE_TYPE (s1), 0);
10776 /* Return an offset into the constant string argument. */
10777 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10778 s1, size_int (r - p1));
10779 return fold_convert_loc (loc, type, tem);
10781 return NULL_TREE;
10785 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10786 the call, and TYPE is its return type.
10788 Return NULL_TREE if no simplification was possible, otherwise return the
10789 simplified form of the call as a tree.
10791 The simplified form may be a constant or other expression which
10792 computes the same value, but in a more efficient manner (including
10793 calls to other builtin functions).
10795 The call may contain arguments which need to be evaluated, but
10796 which are not useful to determine the result of the call. In
10797 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10798 COMPOUND_EXPR will be an argument which must be evaluated.
10799 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10800 COMPOUND_EXPR in the chain will contain the tree for the simplified
10801 form of the builtin function call. */
10803 static tree
10804 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10806 if (!validate_arg (s1, POINTER_TYPE)
10807 || !validate_arg (s2, INTEGER_TYPE))
10808 return NULL_TREE;
10809 else
10811 tree fn;
10812 const char *p1;
10814 if (TREE_CODE (s2) != INTEGER_CST)
10815 return NULL_TREE;
10817 p1 = c_getstr (s1);
10818 if (p1 != NULL)
10820 char c;
10821 const char *r;
10822 tree tem;
10824 if (target_char_cast (s2, &c))
10825 return NULL_TREE;
10827 r = strrchr (p1, c);
10829 if (r == NULL)
10830 return build_int_cst (TREE_TYPE (s1), 0);
10832 /* Return an offset into the constant string argument. */
10833 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10834 s1, size_int (r - p1));
10835 return fold_convert_loc (loc, type, tem);
10838 if (! integer_zerop (s2))
10839 return NULL_TREE;
10841 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10842 if (!fn)
10843 return NULL_TREE;
10845 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10846 return build_call_expr_loc (loc, fn, 2, s1, s2);
10850 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10851 to the call, and TYPE is its return type.
10853 Return NULL_TREE if no simplification was possible, otherwise return the
10854 simplified form of the call as a tree.
10856 The simplified form may be a constant or other expression which
10857 computes the same value, but in a more efficient manner (including
10858 calls to other builtin functions).
10860 The call may contain arguments which need to be evaluated, but
10861 which are not useful to determine the result of the call. In
10862 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10863 COMPOUND_EXPR will be an argument which must be evaluated.
10864 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10865 COMPOUND_EXPR in the chain will contain the tree for the simplified
10866 form of the builtin function call. */
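/* For example (constants shown only as an illustration):

     strpbrk ("abcde", "dx")  folds to  "abcde" + 3
     strpbrk (s, "")          folds to  (char *) 0   (s still evaluated)
     strpbrk (s, "x")         becomes   strchr (s, 'x')

   matching the constant, empty-set and single-character cases below.  */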
10868 static tree
10869 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10871 if (!validate_arg (s1, POINTER_TYPE)
10872 || !validate_arg (s2, POINTER_TYPE))
10873 return NULL_TREE;
10874 else
10876 tree fn;
10877 const char *p1, *p2;
10879 p2 = c_getstr (s2);
10880 if (p2 == NULL)
10881 return NULL_TREE;
10883 p1 = c_getstr (s1);
10884 if (p1 != NULL)
10886 const char *r = strpbrk (p1, p2);
10887 tree tem;
10889 if (r == NULL)
10890 return build_int_cst (TREE_TYPE (s1), 0);
10892 /* Return an offset into the constant string argument. */
10893 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10894 s1, size_int (r - p1));
10895 return fold_convert_loc (loc, type, tem);
10898 if (p2[0] == '\0')
10899 /* strpbrk(x, "") == NULL.
10900 Evaluate and ignore s1 in case it had side-effects. */
10901 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10903 if (p2[1] != '\0')
10904 return NULL_TREE; /* Really call strpbrk. */
10906 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10907 if (!fn)
10908 return NULL_TREE;
10910 /* New argument list transforming strpbrk(s1, s2) to
10911 strchr(s1, s2[0]). */
10912 return build_call_expr_loc (loc, fn, 2, s1,
10913 build_int_cst (integer_type_node, p2[0]));
10917 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10918 to the call.
10920 Return NULL_TREE if no simplification was possible, otherwise return the
10921 simplified form of the call as a tree.
10923 The simplified form may be a constant or other expression which
10924 computes the same value, but in a more efficient manner (including
10925 calls to other builtin functions).
10927 The call may contain arguments which need to be evaluated, but
10928 which are not useful to determine the result of the call. In
10929 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10930 COMPOUND_EXPR will be an argument which must be evaluated.
10931 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10932 COMPOUND_EXPR in the chain will contain the tree for the simplified
10933 form of the builtin function call. */
10935 static tree
10936 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
10938 if (!validate_arg (dst, POINTER_TYPE)
10939 || !validate_arg (src, POINTER_TYPE))
10940 return NULL_TREE;
10941 else
10943 const char *p = c_getstr (src);
10945 /* If the string length is zero, return the dst parameter. */
10946 if (p && *p == '\0')
10947 return dst;
10949 if (optimize_insn_for_speed_p ())
10951 /* See if we can store by pieces into (dst + strlen(dst)). */
10952 tree newdst, call;
10953 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
10954 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10956 if (!strlen_fn || !strcpy_fn)
10957 return NULL_TREE;
10959 /* If we don't have a movstr pattern we don't want to emit a strcpy
10960 call unless the length of the source string is computable (in
10961 that case we can use memcpy, probably later expanding to a
10962 sequence of mov instructions). If we have movstr instructions
10963 we can always emit strcpy calls. */
10964 if (!HAVE_movstr)
10966 tree len = c_strlen (src, 1);
10967 if (! len || TREE_SIDE_EFFECTS (len))
10968 return NULL_TREE;
10971 /* Stabilize the argument list. */
10972 dst = builtin_save_expr (dst);
10974 /* Create strlen (dst). */
10975 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
10976 /* Create (dst p+ strlen (dst)). */
10978 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
10979 TREE_TYPE (dst), dst, newdst);
10980 newdst = builtin_save_expr (newdst);
10982 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
10983 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
10985 return NULL_TREE;
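/* Illustrative sketch of the fold above (D is a hypothetical buffer):

     strcat (D, "")    ->  D
     strcat (D, "ab")  ->  roughly: D' = save_expr (D);
                           strcpy (D' + strlen (D'), "ab"), D'

   i.e. when optimizing for speed and the source length is known (or a
   movstr pattern exists), the append is rewritten as a strcpy at
   D + strlen (D) so later passes can expand it by pieces.  */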
10989 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10990 arguments to the call.
10992 Return NULL_TREE if no simplification was possible, otherwise return the
10993 simplified form of the call as a tree.
10995 The simplified form may be a constant or other expression which
10996 computes the same value, but in a more efficient manner (including
10997 calls to other builtin functions).
10999 The call may contain arguments which need to be evaluated, but
11000 which are not useful to determine the result of the call. In
11001 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11002 COMPOUND_EXPR will be an argument which must be evaluated.
11003 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11004 COMPOUND_EXPR in the chain will contain the tree for the simplified
11005 form of the builtin function call. */
11007 static tree
11008 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11010 if (!validate_arg (dst, POINTER_TYPE)
11011 || !validate_arg (src, POINTER_TYPE)
11012 || !validate_arg (len, INTEGER_TYPE))
11013 return NULL_TREE;
11014 else
11016 const char *p = c_getstr (src);
11018 /* If the requested length is zero, or the src parameter string
11019 length is zero, return the dst parameter. */
11020 if (integer_zerop (len) || (p && *p == '\0'))
11021 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11023 /* If the requested len is greater than or equal to the string
11024 length, call strcat. */
11025 if (TREE_CODE (len) == INTEGER_CST && p
11026 && compare_tree_int (len, strlen (p)) >= 0)
11028 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11030 /* If the replacement _DECL isn't initialized, don't do the
11031 transformation. */
11032 if (!fn)
11033 return NULL_TREE;
11035 return build_call_expr_loc (loc, fn, 2, dst, src);
11037 return NULL_TREE;
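/* Illustrative sketch of the folds above (D and S hypothetical):

     strncat (D, S, 0)      ->  D, with S and the length still evaluated
     strncat (D, "", N)     ->  D
     strncat (D, "abc", 8)  ->  strcat (D, "abc")   since 8 >= strlen ("abc")  */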
11041 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11042 to the call.
11044 Return NULL_TREE if no simplification was possible, otherwise return the
11045 simplified form of the call as a tree.
11047 The simplified form may be a constant or other expression which
11048 computes the same value, but in a more efficient manner (including
11049 calls to other builtin functions).
11051 The call may contain arguments which need to be evaluated, but
11052 which are not useful to determine the result of the call. In
11053 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11054 COMPOUND_EXPR will be an argument which must be evaluated.
11055 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11056 COMPOUND_EXPR in the chain will contain the tree for the simplified
11057 form of the builtin function call. */
11059 static tree
11060 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11062 if (!validate_arg (s1, POINTER_TYPE)
11063 || !validate_arg (s2, POINTER_TYPE))
11064 return NULL_TREE;
11065 else
11067 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11069 /* If both arguments are constants, evaluate at compile-time. */
11070 if (p1 && p2)
11072 const size_t r = strspn (p1, p2);
11073 return size_int (r);
11076 /* If either argument is "", the result is zero. */
11077 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11078 /* Evaluate and ignore both arguments in case either one has
11079 side-effects. */
11080 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11081 s1, s2);
11082 return NULL_TREE;
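/* Illustrative sketch of the folds above:

     strspn ("aab", "ab")  ->  3     both arguments constant
     strspn (S1, "")       ->  0     S1 and S2 still evaluated
     strspn ("", S2)       ->  0  */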
11086 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11087 to the call.
11089 Return NULL_TREE if no simplification was possible, otherwise return the
11090 simplified form of the call as a tree.
11092 The simplified form may be a constant or other expression which
11093 computes the same value, but in a more efficient manner (including
11094 calls to other builtin functions).
11096 The call may contain arguments which need to be evaluated, but
11097 which are not useful to determine the result of the call. In
11098 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11099 COMPOUND_EXPR will be an argument which must be evaluated.
11100 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11101 COMPOUND_EXPR in the chain will contain the tree for the simplified
11102 form of the builtin function call. */
11104 static tree
11105 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11107 if (!validate_arg (s1, POINTER_TYPE)
11108 || !validate_arg (s2, POINTER_TYPE))
11109 return NULL_TREE;
11110 else
11112 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11114 /* If both arguments are constants, evaluate at compile-time. */
11115 if (p1 && p2)
11117 const size_t r = strcspn (p1, p2);
11118 return size_int (r);
11121 /* If the first argument is "", the result is zero. */
11122 if (p1 && *p1 == '\0')
11124 /* Evaluate and ignore argument s2 in case it has
11125 side-effects. */
11126 return omit_one_operand_loc (loc, size_type_node,
11127 size_zero_node, s2);
11130 /* If the second argument is "", return __builtin_strlen(s1). */
11131 if (p2 && *p2 == '\0')
11133 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11135 /* If the replacement _DECL isn't initialized, don't do the
11136 transformation. */
11137 if (!fn)
11138 return NULL_TREE;
11140 return build_call_expr_loc (loc, fn, 1, s1);
11142 return NULL_TREE;
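/* Illustrative sketch of the folds above:

     strcspn ("banana", "nz")  ->  2          both arguments constant
     strcspn ("", S2)          ->  0          S2 still evaluated
     strcspn (S1, "")          ->  strlen (S1)  */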
11146 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11147 to the call. IGNORE is true if the value returned
11148 by the builtin will be ignored. UNLOCKED is true if this is
11149 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11150 the known length of the string. Return NULL_TREE if no simplification
11151 was possible. */
11153 tree
11154 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11155 bool ignore, bool unlocked, tree len)
11157 /* If we're using an unlocked function, assume the other unlocked
11158 functions exist explicitly. */
11159 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11160 : implicit_built_in_decls[BUILT_IN_FPUTC];
11161 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11162 : implicit_built_in_decls[BUILT_IN_FWRITE];
11164 /* If the return value is used, don't do the transformation. */
11165 if (!ignore)
11166 return NULL_TREE;
11168 /* Verify the arguments in the original call. */
11169 if (!validate_arg (arg0, POINTER_TYPE)
11170 || !validate_arg (arg1, POINTER_TYPE))
11171 return NULL_TREE;
11173 if (! len)
11174 len = c_strlen (arg0, 0);
11176 /* Get the length of the string passed to fputs. If the length
11177 can't be determined, punt. */
11178 if (!len
11179 || TREE_CODE (len) != INTEGER_CST)
11180 return NULL_TREE;
11182 switch (compare_tree_int (len, 1))
11184 case -1: /* length is 0, delete the call entirely. */
11185 return omit_one_operand_loc (loc, integer_type_node,
11186 integer_zero_node, arg1);
11188 case 0: /* length is 1, call fputc. */
11190 const char *p = c_getstr (arg0);
11192 if (p != NULL)
11194 if (fn_fputc)
11195 return build_call_expr_loc (loc, fn_fputc, 2,
11196 build_int_cst
11197 (integer_type_node, p[0]), arg1);
11198 else
11199 return NULL_TREE;
11202 /* FALLTHROUGH */
11203 case 1: /* length is greater than 1, call fwrite. */
11205 /* If optimizing for size keep fputs. */
11206 if (optimize_function_for_size_p (cfun))
11207 return NULL_TREE;
11208 /* New argument list transforming fputs(string, stream) to
11209 fwrite(string, 1, len, stream). */
11210 if (fn_fwrite)
11211 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11212 size_one_node, len, arg1);
11213 else
11214 return NULL_TREE;
11216 default:
11217 gcc_unreachable ();
11219 return NULL_TREE;
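/* Illustrative sketch of the folds above, valid only when the return
   value is ignored (F is a hypothetical stream):

     fputs ("", F)       ->  0, with F still evaluated
     fputs ("x", F)      ->  fputc ('x', F)
     fputs ("hello", F)  ->  fwrite ("hello", 1, 5, F)
                             unless optimizing for size  */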
11222 /* Fold the next_arg or va_start call EXP. Returns true if an error
11223 was produced, false otherwise. This is done so that we don't output
11224 the error or warning twice or three times. */
11226 bool
11227 fold_builtin_next_arg (tree exp, bool va_start_p)
11229 tree fntype = TREE_TYPE (current_function_decl);
11230 int nargs = call_expr_nargs (exp);
11231 tree arg;
11233 if (!stdarg_p (fntype))
11235 error ("%<va_start%> used in function with fixed args");
11236 return true;
11239 if (va_start_p)
11241 if (va_start_p && (nargs != 2))
11243 error ("wrong number of arguments to function %<va_start%>");
11244 return true;
11246 arg = CALL_EXPR_ARG (exp, 1);
11248 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11249 when we checked the arguments and if needed issued a warning. */
11250 else
11252 if (nargs == 0)
11254 /* Evidently an out of date version of <stdarg.h>; can't validate
11255 va_start's second argument, but can still work as intended. */
11256 warning (0, "%<__builtin_next_arg%> called without an argument");
11257 return true;
11259 else if (nargs > 1)
11261 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11262 return true;
11264 arg = CALL_EXPR_ARG (exp, 0);
11267 if (TREE_CODE (arg) == SSA_NAME)
11268 arg = SSA_NAME_VAR (arg);
11270 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11271 or __builtin_next_arg (0) the first time we see it, after checking
11272 the arguments and if needed issuing a warning. */
11273 if (!integer_zerop (arg))
11275 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11277 /* Strip off all nops for the sake of the comparison. This
11278 is not quite the same as STRIP_NOPS. It does more.
11279 We must also strip off INDIRECT_REF for C++ reference
11280 parameters. */
11281 while (CONVERT_EXPR_P (arg)
11282 || TREE_CODE (arg) == INDIRECT_REF)
11283 arg = TREE_OPERAND (arg, 0);
11284 if (arg != last_parm)
11286 /* FIXME: Sometimes with the tree optimizers we can get
11287 something other than the last argument even though the user
11288 used the last argument. We just warn and set the arg to be
11289 the last argument so that we will get wrong-code because of
11290 it. */
11291 warning (0, "second parameter of %<va_start%> not last named argument");
11294 /* Undefined by C99 7.15.1.4p4 (va_start):
11295 "If the parameter parmN is declared with the register storage
11296 class, with a function or array type, or with a type that is
11297 not compatible with the type that results after application of
11298 the default argument promotions, the behavior is undefined."
11300 else if (DECL_REGISTER (arg))
11301 warning (0, "undefined behaviour when second parameter of "
11302 "%<va_start%> is declared with %<register%> storage");
11304 /* We want to verify the second parameter just once before the tree
11305 optimizers are run and then avoid keeping it in the tree,
11306 as otherwise we could warn even for correct code like:
11307 void foo (int i, ...)
11308 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11309 if (va_start_p)
11310 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11311 else
11312 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11314 return false;
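/* Illustrative examples of the diagnostics above (hypothetical code):

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); }   warning: second parameter of
                                         va_start not last named argument
     void g (int a)
     { va_list ap; va_start (ap, a); }   error: va_start used in function
                                         with fixed args  */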
11318 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11319 ORIG may be null if this is a 2-argument call. We don't attempt to
11320 simplify calls with more than 3 arguments.
11322 Return NULL_TREE if no simplification was possible, otherwise return the
11323 simplified form of the call as a tree. If IGNORED is true, it means that
11324 the caller does not use the returned value of the function. */
11326 static tree
11327 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11328 tree orig, int ignored)
11330 tree call, retval;
11331 const char *fmt_str = NULL;
11333 /* Verify the required arguments in the original call. We deal with two
11334 types of sprintf() calls: 'sprintf (str, fmt)' and
11335 'sprintf (dest, "%s", orig)'. */
11336 if (!validate_arg (dest, POINTER_TYPE)
11337 || !validate_arg (fmt, POINTER_TYPE))
11338 return NULL_TREE;
11339 if (orig && !validate_arg (orig, POINTER_TYPE))
11340 return NULL_TREE;
11342 /* Check whether the format is a literal string constant. */
11343 fmt_str = c_getstr (fmt);
11344 if (fmt_str == NULL)
11345 return NULL_TREE;
11347 call = NULL_TREE;
11348 retval = NULL_TREE;
11350 if (!init_target_chars ())
11351 return NULL_TREE;
11353 /* If the format doesn't contain % args or %%, use strcpy. */
11354 if (strchr (fmt_str, target_percent) == NULL)
11356 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11358 if (!fn)
11359 return NULL_TREE;
11361 /* Don't optimize sprintf (buf, "abc", ptr++). */
11362 if (orig)
11363 return NULL_TREE;
11365 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11366 'format' is known to contain no % formats. */
11367 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11368 if (!ignored)
11369 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11372 /* If the format is "%s", use strcpy if the result isn't used. */
11373 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11375 tree fn;
11376 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11378 if (!fn)
11379 return NULL_TREE;
11381 /* Don't crash on sprintf (str1, "%s"). */
11382 if (!orig)
11383 return NULL_TREE;
11385 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11386 if (!ignored)
11388 retval = c_strlen (orig, 1);
11389 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11390 return NULL_TREE;
11392 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11395 if (call && retval)
11397 retval = fold_convert_loc
11398 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11399 retval);
11400 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11402 else
11403 return call;
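/* Illustrative sketch of the folds above (BUF and S hypothetical):

     sprintf (BUF, "hello")  ->  strcpy (BUF, "hello"), and the constant 5
                                 if the result is used
     sprintf (BUF, "%s", S)  ->  strcpy (BUF, S), when the result is unused
                                 or strlen (S) is a known constant
     sprintf (BUF, "%d", 42)     not folded here  */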
11406 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11407 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11408 attempt to simplify calls with more than 4 arguments.
11410 Return NULL_TREE if no simplification was possible, otherwise return the
11411 simplified form of the call as a tree. If IGNORED is true, it means that
11412 the caller does not use the returned value of the function. */
11414 static tree
11415 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11416 tree orig, int ignored)
11418 tree call, retval;
11419 const char *fmt_str = NULL;
11420 unsigned HOST_WIDE_INT destlen;
11422 /* Verify the required arguments in the original call. We deal with two
11423 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11424 'snprintf (dest, cst, "%s", orig)'. */
11425 if (!validate_arg (dest, POINTER_TYPE)
11426 || !validate_arg (destsize, INTEGER_TYPE)
11427 || !validate_arg (fmt, POINTER_TYPE))
11428 return NULL_TREE;
11429 if (orig && !validate_arg (orig, POINTER_TYPE))
11430 return NULL_TREE;
11432 if (!host_integerp (destsize, 1))
11433 return NULL_TREE;
11435 /* Check whether the format is a literal string constant. */
11436 fmt_str = c_getstr (fmt);
11437 if (fmt_str == NULL)
11438 return NULL_TREE;
11440 call = NULL_TREE;
11441 retval = NULL_TREE;
11443 if (!init_target_chars ())
11444 return NULL_TREE;
11446 destlen = tree_low_cst (destsize, 1);
11448 /* If the format doesn't contain % args or %%, use strcpy. */
11449 if (strchr (fmt_str, target_percent) == NULL)
11451 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11452 size_t len = strlen (fmt_str);
11454 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11455 if (orig)
11456 return NULL_TREE;
11458 /* We could expand this as
11459 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11460 or to
11461 memcpy (str, fmt_with_nul_at_cstm1, cst);
11462 but in the former case that might increase code size
11463 and in the latter case grow .rodata section too much.
11464 So punt for now. */
11465 if (len >= destlen)
11466 return NULL_TREE;
11468 if (!fn)
11469 return NULL_TREE;
11471 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11472 'format' is known to contain no % formats and
11473 strlen (fmt) < cst. */
11474 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11476 if (!ignored)
11477 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11480 /* If the format is "%s", use strcpy if the result isn't used. */
11481 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11483 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11484 unsigned HOST_WIDE_INT origlen;
11486 /* Don't crash on snprintf (str1, cst, "%s"). */
11487 if (!orig)
11488 return NULL_TREE;
11490 retval = c_strlen (orig, 1);
11491 if (!retval || !host_integerp (retval, 1))
11492 return NULL_TREE;
11494 origlen = tree_low_cst (retval, 1);
11495 /* We could expand this as
11496 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11497 or to
11498 memcpy (str1, str2_with_nul_at_cstm1, cst);
11499 but in the former case that might increase code size
11500 and in the latter case grow .rodata section too much.
11501 So punt for now. */
11502 if (origlen >= destlen)
11503 return NULL_TREE;
11505 /* Convert snprintf (str1, cst, "%s", str2) into
11506 strcpy (str1, str2) if strlen (str2) < cst. */
11507 if (!fn)
11508 return NULL_TREE;
11510 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11512 if (ignored)
11513 retval = NULL_TREE;
11516 if (call && retval)
11518 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
11519 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11520 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11522 else
11523 return call;
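/* Illustrative sketch of the folds above (BUF and S hypothetical):

     snprintf (BUF, 16, "hi")     ->  strcpy (BUF, "hi")     since 2 < 16
     snprintf (BUF, 16, "%s", S)  ->  strcpy (BUF, S)        only when
                                      strlen (S) is a constant < 16
     snprintf (BUF, 3, "hello")       not folded (it would truncate)  */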
11526 /* Expand a call EXP to __builtin_object_size. */
11528 static rtx
11529 expand_builtin_object_size (tree exp)
11531 tree ost;
11532 int object_size_type;
11533 tree fndecl = get_callee_fndecl (exp);
11535 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11537 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11538 exp, fndecl);
11539 expand_builtin_trap ();
11540 return const0_rtx;
11543 ost = CALL_EXPR_ARG (exp, 1);
11544 STRIP_NOPS (ost);
11546 if (TREE_CODE (ost) != INTEGER_CST
11547 || tree_int_cst_sgn (ost) < 0
11548 || compare_tree_int (ost, 3) > 0)
11550 error ("%Klast argument of %D is not integer constant between 0 and 3",
11551 exp, fndecl);
11552 expand_builtin_trap ();
11553 return const0_rtx;
11556 object_size_type = tree_low_cst (ost, 0);
11558 return object_size_type < 2 ? constm1_rtx : const0_rtx;
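/* Illustrative sketch of the fallback above: if the size was not folded
   to a constant earlier, the call expands to the conservative values

     __builtin_object_size (P, 0)  ->  (size_t) -1
     __builtin_object_size (P, 2)  ->  (size_t) 0

   and an invalid last argument is diagnosed and expands to a trap.  */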
11561 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11562 FCODE is the BUILT_IN_* to use.
11563 Return NULL_RTX if we failed; the caller should emit a normal call,
11564 otherwise try to get the result in TARGET, if convenient (and in
11565 mode MODE if that's convenient). */
11567 static rtx
11568 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11569 enum built_in_function fcode)
11571 tree dest, src, len, size;
11573 if (!validate_arglist (exp,
11574 POINTER_TYPE,
11575 fcode == BUILT_IN_MEMSET_CHK
11576 ? INTEGER_TYPE : POINTER_TYPE,
11577 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11578 return NULL_RTX;
11580 dest = CALL_EXPR_ARG (exp, 0);
11581 src = CALL_EXPR_ARG (exp, 1);
11582 len = CALL_EXPR_ARG (exp, 2);
11583 size = CALL_EXPR_ARG (exp, 3);
11585 if (! host_integerp (size, 1))
11586 return NULL_RTX;
11588 if (host_integerp (len, 1) || integer_all_onesp (size))
11590 tree fn;
11592 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11594 warning_at (tree_nonartificial_location (exp),
11595 0, "%Kcall to %D will always overflow destination buffer",
11596 exp, get_callee_fndecl (exp));
11597 return NULL_RTX;
11600 fn = NULL_TREE;
11601 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11602 mem{cpy,pcpy,move,set} is available. */
11603 switch (fcode)
11605 case BUILT_IN_MEMCPY_CHK:
11606 fn = built_in_decls[BUILT_IN_MEMCPY];
11607 break;
11608 case BUILT_IN_MEMPCPY_CHK:
11609 fn = built_in_decls[BUILT_IN_MEMPCPY];
11610 break;
11611 case BUILT_IN_MEMMOVE_CHK:
11612 fn = built_in_decls[BUILT_IN_MEMMOVE];
11613 break;
11614 case BUILT_IN_MEMSET_CHK:
11615 fn = built_in_decls[BUILT_IN_MEMSET];
11616 break;
11617 default:
11618 break;
11621 if (! fn)
11622 return NULL_RTX;
11624 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11625 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11626 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11627 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11629 else if (fcode == BUILT_IN_MEMSET_CHK)
11630 return NULL_RTX;
11631 else
11633 unsigned int dest_align
11634 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11636 /* If DEST is not a pointer type, call the normal function. */
11637 if (dest_align == 0)
11638 return NULL_RTX;
11640 /* If SRC and DEST are the same (and not volatile), do nothing. */
11641 if (operand_equal_p (src, dest, 0))
11643 tree expr;
11645 if (fcode != BUILT_IN_MEMPCPY_CHK)
11647 /* Evaluate and ignore LEN in case it has side-effects. */
11648 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11649 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11652 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11653 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11656 /* __memmove_chk special case. */
11657 if (fcode == BUILT_IN_MEMMOVE_CHK)
11659 unsigned int src_align
11660 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11662 if (src_align == 0)
11663 return NULL_RTX;
11665 /* If src is categorized for a readonly section we can use
11666 normal __memcpy_chk. */
11667 if (readonly_data_expr (src))
11669 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11670 if (!fn)
11671 return NULL_RTX;
11672 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11673 dest, src, len, size);
11674 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11675 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11676 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11679 return NULL_RTX;
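/* Illustrative sketch of the expansion above (D, S, N hypothetical; the
   last argument is the destination object size):

     __memcpy_chk (D, S, 3, 16)   ->  memcpy (D, S, 3)   3 <= 16, or size -1
     __memcpy_chk (D, S, 32, 16)      warned ("will always overflow") and
                                      emitted as a library call
     __mempcpy_chk (D, D, N, 16)  ->  D + N   when src == dest and N is not
                                      a compile-time constant  */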
11683 /* Emit warning if a buffer overflow is detected at compile time. */
11685 static void
11686 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11688 int is_strlen = 0;
11689 tree len, size;
11690 location_t loc = tree_nonartificial_location (exp);
11692 switch (fcode)
11694 case BUILT_IN_STRCPY_CHK:
11695 case BUILT_IN_STPCPY_CHK:
11696 /* For __strcat_chk the warning will be emitted only if overflowing
11697 by at least strlen (dest) + 1 bytes. */
11698 case BUILT_IN_STRCAT_CHK:
11699 len = CALL_EXPR_ARG (exp, 1);
11700 size = CALL_EXPR_ARG (exp, 2);
11701 is_strlen = 1;
11702 break;
11703 case BUILT_IN_STRNCAT_CHK:
11704 case BUILT_IN_STRNCPY_CHK:
11705 len = CALL_EXPR_ARG (exp, 2);
11706 size = CALL_EXPR_ARG (exp, 3);
11707 break;
11708 case BUILT_IN_SNPRINTF_CHK:
11709 case BUILT_IN_VSNPRINTF_CHK:
11710 len = CALL_EXPR_ARG (exp, 1);
11711 size = CALL_EXPR_ARG (exp, 3);
11712 break;
11713 default:
11714 gcc_unreachable ();
11717 if (!len || !size)
11718 return;
11720 if (! host_integerp (size, 1) || integer_all_onesp (size))
11721 return;
11723 if (is_strlen)
11725 len = c_strlen (len, 1);
11726 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11727 return;
11729 else if (fcode == BUILT_IN_STRNCAT_CHK)
11731 tree src = CALL_EXPR_ARG (exp, 1);
11732 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11733 return;
11734 src = c_strlen (src, 1);
11735 if (! src || ! host_integerp (src, 1))
11737 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11738 exp, get_callee_fndecl (exp));
11739 return;
11741 else if (tree_int_cst_lt (src, size))
11742 return;
11744 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11745 return;
11747 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11748 exp, get_callee_fndecl (exp));
11751 /* Emit warning if a buffer overflow is detected at compile time
11752 in __sprintf_chk/__vsprintf_chk calls. */
11754 static void
11755 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11757 tree size, len, fmt;
11758 const char *fmt_str;
11759 int nargs = call_expr_nargs (exp);
11761 /* Verify the required arguments in the original call. */
11763 if (nargs < 4)
11764 return;
11765 size = CALL_EXPR_ARG (exp, 2);
11766 fmt = CALL_EXPR_ARG (exp, 3);
11768 if (! host_integerp (size, 1) || integer_all_onesp (size))
11769 return;
11771 /* Check whether the format is a literal string constant. */
11772 fmt_str = c_getstr (fmt);
11773 if (fmt_str == NULL)
11774 return;
11776 if (!init_target_chars ())
11777 return;
11779 /* If the format doesn't contain % args or %%, we know its size. */
11780 if (strchr (fmt_str, target_percent) == 0)
11781 len = build_int_cstu (size_type_node, strlen (fmt_str));
11782 /* If the format is "%s" and first ... argument is a string literal,
11783 we know it too. */
11784 else if (fcode == BUILT_IN_SPRINTF_CHK
11785 && strcmp (fmt_str, target_percent_s) == 0)
11787 tree arg;
11789 if (nargs < 5)
11790 return;
11791 arg = CALL_EXPR_ARG (exp, 4);
11792 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11793 return;
11795 len = c_strlen (arg, 1);
11796 if (!len || ! host_integerp (len, 1))
11797 return;
11799 else
11800 return;
11802 if (! tree_int_cst_lt (len, size))
11803 warning_at (tree_nonartificial_location (exp),
11804 0, "%Kcall to %D will always overflow destination buffer",
11805 exp, get_callee_fndecl (exp));
11808 /* Emit warning if a free is called with address of a variable. */
11810 static void
11811 maybe_emit_free_warning (tree exp)
11813 tree arg = CALL_EXPR_ARG (exp, 0);
11815 STRIP_NOPS (arg);
11816 if (TREE_CODE (arg) != ADDR_EXPR)
11817 return;
11819 arg = get_base_address (TREE_OPERAND (arg, 0));
11820 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11821 return;
11823 if (SSA_VAR_P (arg))
11824 warning_at (tree_nonartificial_location (exp),
11825 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11826 else
11827 warning_at (tree_nonartificial_location (exp),
11828 0, "%Kattempt to free a non-heap object", exp);
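/* Illustrative example of the warning above (hypothetical code):

     int x;
     free (&x);   warning: attempt to free a non-heap object 'x'  */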
11831 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11832 if possible. */
11834 tree
11835 fold_builtin_object_size (tree ptr, tree ost)
11837 unsigned HOST_WIDE_INT bytes;
11838 int object_size_type;
11840 if (!validate_arg (ptr, POINTER_TYPE)
11841 || !validate_arg (ost, INTEGER_TYPE))
11842 return NULL_TREE;
11844 STRIP_NOPS (ost);
11846 if (TREE_CODE (ost) != INTEGER_CST
11847 || tree_int_cst_sgn (ost) < 0
11848 || compare_tree_int (ost, 3) > 0)
11849 return NULL_TREE;
11851 object_size_type = tree_low_cst (ost, 0);
11853 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11854 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11855 and (size_t) 0 for types 2 and 3. */
11856 if (TREE_SIDE_EFFECTS (ptr))
11857 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11859 if (TREE_CODE (ptr) == ADDR_EXPR)
11861 bytes = compute_builtin_object_size (ptr, object_size_type);
11862 if (double_int_fits_to_tree_p (size_type_node,
11863 uhwi_to_double_int (bytes)))
11864 return build_int_cstu (size_type_node, bytes);
11866 else if (TREE_CODE (ptr) == SSA_NAME)
11868 /* If object size is not known yet, delay folding until
11869 later. Maybe subsequent passes will help determine
11870 it. */
11871 bytes = compute_builtin_object_size (ptr, object_size_type);
11872 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11873 && double_int_fits_to_tree_p (size_type_node,
11874 uhwi_to_double_int (bytes)))
11875 return build_int_cstu (size_type_node, bytes);
11878 return NULL_TREE;
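/* Illustrative results of __builtin_object_size folding (the constant
   may also be supplied later by the object-size pass):

     char buf[64];
     __builtin_object_size (&buf[16], 0)  ->  48
     __builtin_object_size (f (), 0)      ->  (size_t) -1   side-effects
     __builtin_object_size (f (), 2)      ->  (size_t) 0  */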
11881 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11882 DEST, SRC, LEN, and SIZE are the arguments to the call.
11883 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11884 code of the builtin. If MAXLEN is not NULL, it is maximum length
11885 passed as third argument. */
11887 tree
11888 fold_builtin_memory_chk (location_t loc, tree fndecl,
11889 tree dest, tree src, tree len, tree size,
11890 tree maxlen, bool ignore,
11891 enum built_in_function fcode)
11893 tree fn;
11895 if (!validate_arg (dest, POINTER_TYPE)
11896 || !validate_arg (src,
11897 (fcode == BUILT_IN_MEMSET_CHK
11898 ? INTEGER_TYPE : POINTER_TYPE))
11899 || !validate_arg (len, INTEGER_TYPE)
11900 || !validate_arg (size, INTEGER_TYPE))
11901 return NULL_TREE;
11903 /* If SRC and DEST are the same (and not volatile), return DEST
11904 (resp. DEST+LEN for __mempcpy_chk). */
11905 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11907 if (fcode != BUILT_IN_MEMPCPY_CHK)
11908 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11909 dest, len);
11910 else
11912 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11913 dest, len);
11914 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11918 if (! host_integerp (size, 1))
11919 return NULL_TREE;
11921 if (! integer_all_onesp (size))
11923 if (! host_integerp (len, 1))
11925 /* If LEN is not constant, try MAXLEN too.
11926 For MAXLEN only allow optimizing into non-_ocs function
11927 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11928 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11930 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11932 /* (void) __mempcpy_chk () can be optimized into
11933 (void) __memcpy_chk (). */
11934 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11935 if (!fn)
11936 return NULL_TREE;
11938 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
11940 return NULL_TREE;
11943 else
11944 maxlen = len;
11946 if (tree_int_cst_lt (size, maxlen))
11947 return NULL_TREE;
11950 fn = NULL_TREE;
11951 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11952 mem{cpy,pcpy,move,set} is available. */
11953 switch (fcode)
11955 case BUILT_IN_MEMCPY_CHK:
11956 fn = built_in_decls[BUILT_IN_MEMCPY];
11957 break;
11958 case BUILT_IN_MEMPCPY_CHK:
11959 fn = built_in_decls[BUILT_IN_MEMPCPY];
11960 break;
11961 case BUILT_IN_MEMMOVE_CHK:
11962 fn = built_in_decls[BUILT_IN_MEMMOVE];
11963 break;
11964 case BUILT_IN_MEMSET_CHK:
11965 fn = built_in_decls[BUILT_IN_MEMSET];
11966 break;
11967 default:
11968 break;
11971 if (!fn)
11972 return NULL_TREE;
11974 return build_call_expr_loc (loc, fn, 3, dest, src, len);
11977 /* Fold a call to the __st[rp]cpy_chk builtin.
11978 DEST, SRC, and SIZE are the arguments to the call.
11979 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11980 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11981 strings passed as second argument. */
11983 tree
11984 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
11985 tree src, tree size,
11986 tree maxlen, bool ignore,
11987 enum built_in_function fcode)
11989 tree len, fn;
11991 if (!validate_arg (dest, POINTER_TYPE)
11992 || !validate_arg (src, POINTER_TYPE)
11993 || !validate_arg (size, INTEGER_TYPE))
11994 return NULL_TREE;
11996 /* If SRC and DEST are the same (and not volatile), return DEST. */
11997 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
11998 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12000 if (! host_integerp (size, 1))
12001 return NULL_TREE;
12003 if (! integer_all_onesp (size))
12005 len = c_strlen (src, 1);
12006 if (! len || ! host_integerp (len, 1))
12008 /* If LEN is not constant, try MAXLEN too.
12009 For MAXLEN only allow optimizing into non-_ocs function
12010 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12011 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12013 if (fcode == BUILT_IN_STPCPY_CHK)
12015 if (! ignore)
12016 return NULL_TREE;
12018 /* If return value of __stpcpy_chk is ignored,
12019 optimize into __strcpy_chk. */
12020 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12021 if (!fn)
12022 return NULL_TREE;
12024 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12027 if (! len || TREE_SIDE_EFFECTS (len))
12028 return NULL_TREE;
12030 /* If c_strlen returned something, but not a constant,
12031 transform __strcpy_chk into __memcpy_chk. */
12032 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12033 if (!fn)
12034 return NULL_TREE;
12036 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12037 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12038 build_call_expr_loc (loc, fn, 4,
12039 dest, src, len, size));
12042 else
12043 maxlen = len;
12045 if (! tree_int_cst_lt (maxlen, size))
12046 return NULL_TREE;
12049 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12050 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12051 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12052 if (!fn)
12053 return NULL_TREE;
12055 return build_call_expr_loc (loc, fn, 2, dest, src);
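/* Illustrative sketch of the folds above (D, S hypothetical; the last
   argument is the destination object size):

     __strcpy_chk (D, D, 16)         ->  D
     __strcpy_chk (D, "abc", 16)     ->  strcpy (D, "abc")   since 3 < 16
     __strcpy_chk (D, S, 16)         ->  __memcpy_chk (D, S, LEN + 1, 16)
                                         when c_strlen gives a non-constant
                                         but side-effect-free LEN
     (void) __stpcpy_chk (D, S, 16)  ->  __strcpy_chk (D, S, 16)
                                         when the length is unknown  */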
12058 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12059 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12060 length passed as third argument. */
12062 tree
12063 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12064 tree len, tree size, tree maxlen)
12066 tree fn;
12068 if (!validate_arg (dest, POINTER_TYPE)
12069 || !validate_arg (src, POINTER_TYPE)
12070 || !validate_arg (len, INTEGER_TYPE)
12071 || !validate_arg (size, INTEGER_TYPE))
12072 return NULL_TREE;
12074 if (! host_integerp (size, 1))
12075 return NULL_TREE;
12077 if (! integer_all_onesp (size))
12079 if (! host_integerp (len, 1))
12081 /* If LEN is not constant, try MAXLEN too.
12082 For MAXLEN only allow optimizing into non-_ocs function
12083 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12084 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12085 return NULL_TREE;
12087 else
12088 maxlen = len;
12090 if (tree_int_cst_lt (size, maxlen))
12091 return NULL_TREE;
12094 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12095 fn = built_in_decls[BUILT_IN_STRNCPY];
12096 if (!fn)
12097 return NULL_TREE;
12099 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12102 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12103 are the arguments to the call. */
12105 static tree
12106 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12107 tree src, tree size)
12109 tree fn;
12110 const char *p;
12112 if (!validate_arg (dest, POINTER_TYPE)
12113 || !validate_arg (src, POINTER_TYPE)
12114 || !validate_arg (size, INTEGER_TYPE))
12115 return NULL_TREE;
12117 p = c_getstr (src);
12118 /* If the SRC parameter is "", return DEST. */
12119 if (p && *p == '\0')
12120 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12122 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12123 return NULL_TREE;
12125 /* If __builtin_strcat_chk is used, assume strcat is available. */
12126 fn = built_in_decls[BUILT_IN_STRCAT];
12127 if (!fn)
12128 return NULL_TREE;
12130 return build_call_expr_loc (loc, fn, 2, dest, src);
12133 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12134 LEN, and SIZE. */
12136 static tree
12137 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12138 tree dest, tree src, tree len, tree size)
12140 tree fn;
12141 const char *p;
12143 if (!validate_arg (dest, POINTER_TYPE)
12144 || !validate_arg (src, POINTER_TYPE)
12145 || !validate_arg (len, INTEGER_TYPE)
12146 || !validate_arg (size, INTEGER_TYPE))
12147 return NULL_TREE;
12149 p = c_getstr (src);
12150 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12151 if (p && *p == '\0')
12152 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12153 else if (integer_zerop (len))
12154 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12156 if (! host_integerp (size, 1))
12157 return NULL_TREE;
12159 if (! integer_all_onesp (size))
12161 tree src_len = c_strlen (src, 1);
12162 if (src_len
12163 && host_integerp (src_len, 1)
12164 && host_integerp (len, 1)
12165 && ! tree_int_cst_lt (len, src_len))
12167 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12168 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12169 if (!fn)
12170 return NULL_TREE;
12172 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12174 return NULL_TREE;
12177 /* If __builtin_strncat_chk is used, assume strncat is available. */
12178 fn = built_in_decls[BUILT_IN_STRNCAT];
12179 if (!fn)
12180 return NULL_TREE;
12182 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12185 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12186 Return NULL_TREE if a normal call should be emitted rather than
12187 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12188 or BUILT_IN_VSPRINTF_CHK. */
12190 static tree
12191 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12192 enum built_in_function fcode)
12194 tree dest, size, len, fn, fmt, flag;
12195 const char *fmt_str;
12197 /* Verify the required arguments in the original call. */
12198 if (nargs < 4)
12199 return NULL_TREE;
12200 dest = args[0];
12201 if (!validate_arg (dest, POINTER_TYPE))
12202 return NULL_TREE;
12203 flag = args[1];
12204 if (!validate_arg (flag, INTEGER_TYPE))
12205 return NULL_TREE;
12206 size = args[2];
12207 if (!validate_arg (size, INTEGER_TYPE))
12208 return NULL_TREE;
12209 fmt = args[3];
12210 if (!validate_arg (fmt, POINTER_TYPE))
12211 return NULL_TREE;
12213 if (! host_integerp (size, 1))
12214 return NULL_TREE;
12216 len = NULL_TREE;
12218 if (!init_target_chars ())
12219 return NULL_TREE;
12221 /* Check whether the format is a literal string constant. */
12222 fmt_str = c_getstr (fmt);
12223 if (fmt_str != NULL)
12225 /* If the format doesn't contain % args or %%, we know the size. */
12226 if (strchr (fmt_str, target_percent) == 0)
12228 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12229 len = build_int_cstu (size_type_node, strlen (fmt_str));
12231 /* If the format is "%s" and first ... argument is a string literal,
12232 we know the size too. */
12233 else if (fcode == BUILT_IN_SPRINTF_CHK
12234 && strcmp (fmt_str, target_percent_s) == 0)
12236 tree arg;
12238 if (nargs == 5)
12240 arg = args[4];
12241 if (validate_arg (arg, POINTER_TYPE))
12243 len = c_strlen (arg, 1);
12244 if (! len || ! host_integerp (len, 1))
12245 len = NULL_TREE;
12251 if (! integer_all_onesp (size))
12253 if (! len || ! tree_int_cst_lt (len, size))
12254 return NULL_TREE;
12257 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12258 or if format doesn't contain % chars or is "%s". */
12259 if (! integer_zerop (flag))
12261 if (fmt_str == NULL)
12262 return NULL_TREE;
12263 if (strchr (fmt_str, target_percent) != NULL
12264 && strcmp (fmt_str, target_percent_s))
12265 return NULL_TREE;
12268 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12269 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12270 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12271 if (!fn)
12272 return NULL_TREE;
12274 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
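/* Illustrative sketch of the folds above (BUF and S hypothetical; the
   third argument is the destination object size):

     __sprintf_chk (BUF, 1, 16, "hi")              ->  sprintf (BUF, "hi")
     __sprintf_chk (BUF, 0, (size_t) -1, "%d", 42) ->  sprintf (BUF, "%d", 42)
     __sprintf_chk (BUF, 0, 16, "%s", S)               not folded: the length
                                                       of S is unknown  */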
12277 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12278 a normal call should be emitted rather than expanding the function
12279 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12281 static tree
12282 fold_builtin_sprintf_chk (location_t loc, tree exp,
12283 enum built_in_function fcode)
12285 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12286 CALL_EXPR_ARGP (exp), fcode);
12289 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS. Return
12290 NULL_TREE if a normal call should be emitted rather than expanding
12291 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12292 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12293 passed as second argument. */
12295 static tree
12296 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12297 tree maxlen, enum built_in_function fcode)
12299 tree dest, size, len, fn, fmt, flag;
12300 const char *fmt_str;
12302 /* Verify the required arguments in the original call. */
12303 if (nargs < 5)
12304 return NULL_TREE;
12305 dest = args[0];
12306 if (!validate_arg (dest, POINTER_TYPE))
12307 return NULL_TREE;
12308 len = args[1];
12309 if (!validate_arg (len, INTEGER_TYPE))
12310 return NULL_TREE;
12311 flag = args[2];
12312 if (!validate_arg (flag, INTEGER_TYPE))
12313 return NULL_TREE;
12314 size = args[3];
12315 if (!validate_arg (size, INTEGER_TYPE))
12316 return NULL_TREE;
12317 fmt = args[4];
12318 if (!validate_arg (fmt, POINTER_TYPE))
12319 return NULL_TREE;
12321 if (! host_integerp (size, 1))
12322 return NULL_TREE;
12324 if (! integer_all_onesp (size))
12326 if (! host_integerp (len, 1))
12328 /* If LEN is not constant, try MAXLEN too.
12329 For MAXLEN only allow optimizing into non-_ocs function
12330 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12331 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12332 return NULL_TREE;
12334 else
12335 maxlen = len;
12337 if (tree_int_cst_lt (size, maxlen))
12338 return NULL_TREE;
12341 if (!init_target_chars ())
12342 return NULL_TREE;
12344 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12345 or if format doesn't contain % chars or is "%s". */
12346 if (! integer_zerop (flag))
12348 fmt_str = c_getstr (fmt);
12349 if (fmt_str == NULL)
12350 return NULL_TREE;
12351 if (strchr (fmt_str, target_percent) != NULL
12352 && strcmp (fmt_str, target_percent_s))
12353 return NULL_TREE;
12356 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12357 available. */
12358 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12359 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12360 if (!fn)
12361 return NULL_TREE;
12363 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12366 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12367 a normal call should be emitted rather than expanding the function
12368 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12369 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12370 passed as second argument. */
12372 tree
12373 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12374 enum built_in_function fcode)
12376 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12377 CALL_EXPR_ARGP (exp), maxlen, fcode);
12380 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12381 FMT and ARG are the arguments to the call; we don't fold cases with
12382 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12384 Return NULL_TREE if no simplification was possible, otherwise return the
12385 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12386 code of the function to be simplified. */
12388 static tree
12389 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12390 tree arg, bool ignore,
12391 enum built_in_function fcode)
12393 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12394 const char *fmt_str = NULL;
12396 /* If the return value is used, don't do the transformation. */
12397 if (! ignore)
12398 return NULL_TREE;
12400 /* Verify the required arguments in the original call. */
12401 if (!validate_arg (fmt, POINTER_TYPE))
12402 return NULL_TREE;
12404 /* Check whether the format is a literal string constant. */
12405 fmt_str = c_getstr (fmt);
12406 if (fmt_str == NULL)
12407 return NULL_TREE;
12409 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12411 /* If we're using an unlocked function, assume the other
12412 unlocked functions exist explicitly. */
12413 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12414 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12416 else
12418 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12419 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12422 if (!init_target_chars ())
12423 return NULL_TREE;
12425 if (strcmp (fmt_str, target_percent_s) == 0
12426 || strchr (fmt_str, target_percent) == NULL)
12428 const char *str;
12430 if (strcmp (fmt_str, target_percent_s) == 0)
12432 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12433 return NULL_TREE;
12435 if (!arg || !validate_arg (arg, POINTER_TYPE))
12436 return NULL_TREE;
12438 str = c_getstr (arg);
12439 if (str == NULL)
12440 return NULL_TREE;
12442 else
12444 /* The format specifier doesn't contain any '%' characters. */
12445 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12446 && arg)
12447 return NULL_TREE;
12448 str = fmt_str;
12451 /* If the string was "", printf does nothing. */
12452 if (str[0] == '\0')
12453 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12455 /* If the string has length of 1, call putchar. */
12456 if (str[1] == '\0')
12458 /* Given printf ("c"), where c is any one character,
12459 convert "c"[0] to an int and pass that to the replacement
12460 function. */
12461 newarg = build_int_cst (integer_type_node, str[0]);
12462 if (fn_putchar)
12463 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12465 else
12467 /* If the string was "string\n", call puts("string"). */
12468 size_t len = strlen (str);
12469 if ((unsigned char)str[len - 1] == target_newline
12470 && (size_t) (int) len == len
12471 && (int) len > 0)
12473 char *newstr;
12474 tree offset_node, string_cst;
12476 /* Create a NUL-terminated string that's one char shorter
12477 than the original, stripping off the trailing '\n'. */
12478 newarg = build_string_literal (len, str);
12479 string_cst = string_constant (newarg, &offset_node);
12480 gcc_checking_assert (string_cst
12481 && (TREE_STRING_LENGTH (string_cst)
12482 == (int) len)
12483 && integer_zerop (offset_node)
12484 && (unsigned char)
12485 TREE_STRING_POINTER (string_cst)[len - 1]
12486 == target_newline);
12487 /* build_string_literal creates a new STRING_CST,
12488 modify it in place to avoid double copying. */
12489 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12490 newstr[len - 1] = '\0';
12491 if (fn_puts)
12492 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12494 else
12495 /* We'd like to arrange to call fputs(string,stdout) here,
12496 but we need stdout and don't have a way to get it yet. */
12497 return NULL_TREE;
12501 /* The other optimizations can be done only on the non-va_list variants. */
12502 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12503 return NULL_TREE;
12505 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12506 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12508 if (!arg || !validate_arg (arg, POINTER_TYPE))
12509 return NULL_TREE;
12510 if (fn_puts)
12511 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12514 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12515 else if (strcmp (fmt_str, target_percent_c) == 0)
12517 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12518 return NULL_TREE;
12519 if (fn_putchar)
12520 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12523 if (!call)
12524 return NULL_TREE;
12526 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
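/* Illustrative sketch of the folds above, valid only when the return
   value is ignored:

     printf ("")         ->  0, nothing to do
     printf ("x")        ->  putchar ('x')
     printf ("hello\n")  ->  puts ("hello")
     printf ("%s\n", S)  ->  puts (S)
     printf ("%c", C)    ->  putchar (C)  */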
12529 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12530 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12531 more than 3 arguments, and ARG may be null in the 2-argument case.
12533 Return NULL_TREE if no simplification was possible, otherwise return the
12534 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12535 code of the function to be simplified. */
12537 static tree
12538 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12539 tree fmt, tree arg, bool ignore,
12540 enum built_in_function fcode)
12542 tree fn_fputc, fn_fputs, call = NULL_TREE;
12543 const char *fmt_str = NULL;
12545 /* If the return value is used, don't do the transformation. */
12546 if (! ignore)
12547 return NULL_TREE;
12549 /* Verify the required arguments in the original call. */
12550 if (!validate_arg (fp, POINTER_TYPE))
12551 return NULL_TREE;
12552 if (!validate_arg (fmt, POINTER_TYPE))
12553 return NULL_TREE;
12555 /* Check whether the format is a literal string constant. */
12556 fmt_str = c_getstr (fmt);
12557 if (fmt_str == NULL)
12558 return NULL_TREE;
12560 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12562 /* If we're using an unlocked function, assume the other
12563 unlocked functions exist explicitly. */
12564 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12565 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12567 else
12569 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12570 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12573 if (!init_target_chars ())
12574 return NULL_TREE;
12576 /* If the format doesn't contain % args or %%, use strcpy. */
12577 if (strchr (fmt_str, target_percent) == NULL)
12579 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12580 && arg)
12581 return NULL_TREE;
12583 /* If the format specifier was "", fprintf does nothing. */
12584 if (fmt_str[0] == '\0')
12586 /* If FP has side-effects, just wait until gimplification is
12587 done. */
12588 if (TREE_SIDE_EFFECTS (fp))
12589 return NULL_TREE;
12591 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12594 /* When "string" doesn't contain %, replace all cases of
12595 fprintf (fp, string) with fputs (string, fp). The fputs
12596 builtin will take care of special cases like length == 1. */
12597 if (fn_fputs)
12598 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12601 /* The other optimizations can be done only on the non-va_list variants. */
12602 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12603 return NULL_TREE;
12605 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12606 else if (strcmp (fmt_str, target_percent_s) == 0)
12608 if (!arg || !validate_arg (arg, POINTER_TYPE))
12609 return NULL_TREE;
12610 if (fn_fputs)
12611 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12614 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12615 else if (strcmp (fmt_str, target_percent_c) == 0)
12617 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12618 return NULL_TREE;
12619 if (fn_fputc)
12620 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12623 if (!call)
12624 return NULL_TREE;
12625 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
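/* Illustrative sketch of the folds above, valid only when the return
   value is ignored (F is a hypothetical stream):

     fprintf (F, "")       ->  0, unless F has side-effects
     fprintf (F, "hello")  ->  fputs ("hello", F)
     fprintf (F, "%s", S)  ->  fputs (S, F)
     fprintf (F, "%c", C)  ->  fputc (C, F)  */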
12628 /* Initialize format string characters in the target charset. */
12630 static bool
12631 init_target_chars (void)
12633 static bool init;
12634 if (!init)
12636 target_newline = lang_hooks.to_target_charset ('\n');
12637 target_percent = lang_hooks.to_target_charset ('%');
12638 target_c = lang_hooks.to_target_charset ('c');
12639 target_s = lang_hooks.to_target_charset ('s');
12640 if (target_newline == 0 || target_percent == 0 || target_c == 0
12641 || target_s == 0)
12642 return false;
12644 target_percent_c[0] = target_percent;
12645 target_percent_c[1] = target_c;
12646 target_percent_c[2] = '\0';
12648 target_percent_s[0] = target_percent;
12649 target_percent_s[1] = target_s;
12650 target_percent_s[2] = '\0';
12652 target_percent_s_newline[0] = target_percent;
12653 target_percent_s_newline[1] = target_s;
12654 target_percent_s_newline[2] = target_newline;
12655 target_percent_s_newline[3] = '\0';
12657 init = true;
12659 return true;
12662 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12663 and no overflow/underflow occurred. INEXACT is true if M was not
12664 exactly calculated. TYPE is the tree type for the result. This
12665 function assumes that the caller cleared the MPFR flags before
12666 calculating M, so that any flag set while computing M can be
12667 detected here. Return NULL_TREE if any checks fail. */
12669 static tree
12670 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12672 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12673 overflow/underflow occurred. If -frounding-math, proceed iff the
12674 result of calling FUNC was exact. */
12675 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12676 && (!flag_rounding_math || !inexact))
12678 REAL_VALUE_TYPE rr;
12680 real_from_mpfr (&rr, m, type, GMP_RNDN);
12681 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12682 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12683 but the mpfr_t is not, then we underflowed in the
12684 conversion. */
12685 if (real_isfinite (&rr)
12686 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12688 REAL_VALUE_TYPE rmode;
12690 real_convert (&rmode, TYPE_MODE (type), &rr);
12691 /* Proceed iff the specified mode can hold the value. */
12692 if (real_identical (&rmode, &rr))
12693 return build_real (type, rmode);
12696 return NULL_TREE;
12699 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12700 number and no overflow/underflow occurred. INEXACT is true if M
12701 was not exactly calculated. TYPE is the tree type for the result.
12702 This function assumes that the caller cleared the MPFR flags
12703 before calculating M, so that any flag set while computing M can
12704 be detected here. Return NULL_TREE if any checks fail; if
12705 FORCE_CONVERT is true, the checks are bypassed. */
12707 static tree
12708 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12710 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12711 overflow/underflow occurred. If -frounding-math, proceed iff the
12712 result of calling FUNC was exact. */
12713 if (force_convert
12714 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12715 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12716 && (!flag_rounding_math || !inexact)))
12718 REAL_VALUE_TYPE re, im;
12720 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12721 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12722 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12723 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12724 but the mpfr_t is not, then we underflowed in the
12725 conversion. */
12726 if (force_convert
12727 || (real_isfinite (&re) && real_isfinite (&im)
12728 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12729 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12731 REAL_VALUE_TYPE re_mode, im_mode;
12733 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12734 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12735 /* Proceed iff the specified mode can hold the value. */
12736 if (force_convert
12737 || (real_identical (&re_mode, &re)
12738 && real_identical (&im_mode, &im)))
12739 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12740 build_real (TREE_TYPE (type), im_mode));
12743 return NULL_TREE;
12746 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12747 FUNC on it and return the resulting value as a tree with type TYPE.
12748 If MIN and/or MAX are not NULL, then the supplied ARG must be
12749 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12750 acceptable values, otherwise they are not. The mpfr precision is
12751 set to the precision of TYPE. We assume that function FUNC returns
12752 zero if the result could be calculated exactly within the requested
12753 precision. */
12755 static tree
12756 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12757 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12758 bool inclusive)
12760 tree result = NULL_TREE;
12762 STRIP_NOPS (arg);
12764 /* To proceed, MPFR must exactly represent the target floating point
12765 format, which only happens when the target base equals two. */
12766 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12767 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12769 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12771 if (real_isfinite (ra)
12772 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12773 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12775 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12776 const int prec = fmt->p;
12777 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12778 int inexact;
12779 mpfr_t m;
12781 mpfr_init2 (m, prec);
12782 mpfr_from_real (m, ra, GMP_RNDN);
12783 mpfr_clear_flags ();
12784 inexact = func (m, m, rnd);
12785 result = do_mpfr_ckconv (m, type, inexact);
12786 mpfr_clear (m);
12790 return result;
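/* For example, the one-argument math folders elsewhere in this file use
   this helper to evaluate calls on constant operands, roughly:

     CASE_FLT_FN (BUILT_IN_SIN):
       if (validate_arg (arg0, REAL_TYPE))
         return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);

   so that a call such as sin (1.0) folds to the correctly rounded
   REAL_CST for the target format (sketch of the caller, shown here for
   illustration only).  */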
12793 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12794 FUNC on it and return the resulting value as a tree with type TYPE.
12795 The mpfr precision is set to the precision of TYPE. We assume that
12796 function FUNC returns zero if the result could be calculated
12797 exactly within the requested precision. */
12799 static tree
12800 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12801 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12803 tree result = NULL_TREE;
12805 STRIP_NOPS (arg1);
12806 STRIP_NOPS (arg2);
12808 /* To proceed, MPFR must exactly represent the target floating point
12809 format, which only happens when the target base equals two. */
12810 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12811 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12812 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12814 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12815 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12817 if (real_isfinite (ra1) && real_isfinite (ra2))
12819 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12820 const int prec = fmt->p;
12821 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12822 int inexact;
12823 mpfr_t m1, m2;
12825 mpfr_inits2 (prec, m1, m2, NULL);
12826 mpfr_from_real (m1, ra1, GMP_RNDN);
12827 mpfr_from_real (m2, ra2, GMP_RNDN);
12828 mpfr_clear_flags ();
12829 inexact = func (m1, m1, m2, rnd);
12830 result = do_mpfr_ckconv (m1, type, inexact);
12831 mpfr_clears (m1, m2, NULL);
12835 return result;
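/* Standalone illustration (not part of builtins.c) of the "FUNC returns
   zero iff the result is exact" convention that do_mpfr_arg2 and the other
   helpers rely on: atan2 (0.0, 1.0) is exactly 0, so mpfr_atan2 reports a
   zero ternary value, while atan2 (1.0, 1.0) = pi/4 must be rounded and
   reports a nonzero one.  Assumes only the public MPFR API; link with
   -lmpfr -lgmp.  */

#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t y, x, r;

  mpfr_inits2 (53, y, x, r, NULL);

  mpfr_set_d (y, 0.0, GMP_RNDN);
  mpfr_set_d (x, 1.0, GMP_RNDN);
  printf ("atan2(0,1): inexact=%d\n", mpfr_atan2 (r, y, x, GMP_RNDN));

  mpfr_set_d (y, 1.0, GMP_RNDN);
  printf ("atan2(1,1): inexact=%d\n", mpfr_atan2 (r, y, x, GMP_RNDN));

  mpfr_clears (y, x, r, NULL);
  return 0;
}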
12838 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12839 FUNC on it and return the resulting value as a tree with type TYPE.
12840 The mpfr precision is set to the precision of TYPE. We assume that
12841 function FUNC returns zero if the result could be calculated
12842 exactly within the requested precision. */
12844 static tree
12845 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12846 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12848 tree result = NULL_TREE;
12850 STRIP_NOPS (arg1);
12851 STRIP_NOPS (arg2);
12852 STRIP_NOPS (arg3);
12854 /* To proceed, MPFR must exactly represent the target floating point
12855 format, which only happens when the target base equals two. */
12856 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12857 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12858 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12859 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12861 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12862 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12863 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12865 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12867 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12868 const int prec = fmt->p;
12869 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12870 int inexact;
12871 mpfr_t m1, m2, m3;
12873 mpfr_inits2 (prec, m1, m2, m3, NULL);
12874 mpfr_from_real (m1, ra1, GMP_RNDN);
12875 mpfr_from_real (m2, ra2, GMP_RNDN);
12876 mpfr_from_real (m3, ra3, GMP_RNDN);
12877 mpfr_clear_flags ();
12878 inexact = func (m1, m1, m2, m3, rnd);
12879 result = do_mpfr_ckconv (m1, type, inexact);
12880 mpfr_clears (m1, m2, m3, NULL);
12884 return result;
12887 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and store
12888 the resulting values through the pointers ARG_SINP and ARG_COSP.
12889 If ARG_SINP and ARG_COSP are NULL then the result is returned
12890 as a complex value.
12891 The type is taken from the type of ARG and is used for setting the
12892 precision of the calculation and results. */
12894 static tree
12895 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12897 tree const type = TREE_TYPE (arg);
12898 tree result = NULL_TREE;
12900 STRIP_NOPS (arg);
12902 /* To proceed, MPFR must exactly represent the target floating point
12903 format, which only happens when the target base equals two. */
12904 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12905 && TREE_CODE (arg) == REAL_CST
12906 && !TREE_OVERFLOW (arg))
12908 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12910 if (real_isfinite (ra))
12912 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12913 const int prec = fmt->p;
12914 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12915 tree result_s, result_c;
12916 int inexact;
12917 mpfr_t m, ms, mc;
12919 mpfr_inits2 (prec, m, ms, mc, NULL);
12920 mpfr_from_real (m, ra, GMP_RNDN);
12921 mpfr_clear_flags ();
12922 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12923 result_s = do_mpfr_ckconv (ms, type, inexact);
12924 result_c = do_mpfr_ckconv (mc, type, inexact);
12925 mpfr_clears (m, ms, mc, NULL);
12926 if (result_s && result_c)
12928 /* If we are to return the result as a complex value, do so. */
12929 if (!arg_sinp && !arg_cosp)
12930 return build_complex (build_complex_type (type),
12931 result_c, result_s);
12933 /* Dereference the sin/cos pointer arguments. */
12934 arg_sinp = build_fold_indirect_ref (arg_sinp);
12935 arg_cosp = build_fold_indirect_ref (arg_cosp);
12936 /* Proceed iff valid pointer types were passed in. */
12937 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12938 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12940 /* Set the values. */
12941 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12942 result_s);
12943 TREE_SIDE_EFFECTS (result_s) = 1;
12944 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12945 result_c);
12946 TREE_SIDE_EFFECTS (result_c) = 1;
12947 /* Combine the assignments into a compound expr. */
12948 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12949 result_s, result_c));
12954 return result;
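/* Standalone illustration (not part of builtins.c) of mpfr_sin_cos, which
   computes both results in one call; its return value is zero only when
   both the sine and the cosine are exact, which is presumably why the
   single "inexact" value above is passed to both do_mpfr_ckconv calls.
   Assumes only the public MPFR API; link with -lmpfr -lgmp.  */

#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m, ms, mc;
  int inexact;

  mpfr_inits2 (53, m, ms, mc, NULL);
  mpfr_set_d (m, 1.0, GMP_RNDN);
  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
  printf ("sin(1.0) ~= %.17g  cos(1.0) ~= %.17g  inexact=%d\n",
          mpfr_get_d (ms, GMP_RNDN), mpfr_get_d (mc, GMP_RNDN), inexact);
  mpfr_clears (m, ms, mc, NULL);
  return 0;
}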
12957 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12958 two-argument mpfr order N Bessel function FUNC on them and return
12959 the resulting value as a tree with type TYPE. The mpfr precision
12960 is set to the precision of TYPE. We assume that function FUNC
12961 returns zero if the result could be calculated exactly within the
12962 requested precision. */
12963 static tree
12964 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12965 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12966 const REAL_VALUE_TYPE *min, bool inclusive)
12968 tree result = NULL_TREE;
12970 STRIP_NOPS (arg1);
12971 STRIP_NOPS (arg2);
12973 /* To proceed, MPFR must exactly represent the target floating point
12974 format, which only happens when the target base equals two. */
12975 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12976 && host_integerp (arg1, 0)
12977 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12979 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
12980 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12982 if (n == (long)n
12983 && real_isfinite (ra)
12984 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12986 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12987 const int prec = fmt->p;
12988 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12989 int inexact;
12990 mpfr_t m;
12992 mpfr_init2 (m, prec);
12993 mpfr_from_real (m, ra, GMP_RNDN);
12994 mpfr_clear_flags ();
12995 inexact = func (m, n, m, rnd);
12996 result = do_mpfr_ckconv (m, type, inexact);
12997 mpfr_clear (m);
13001 return result;
13004 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13005 the value pointed to by ARG_QUO and return the remainder. The type is
13006 taken from the type of ARG0 and is used for setting the precision of
13007 the calculation and results. */
13009 static tree
13010 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13012 tree const type = TREE_TYPE (arg0);
13013 tree result = NULL_TREE;
13015 STRIP_NOPS (arg0);
13016 STRIP_NOPS (arg1);
13018 /* To proceed, MPFR must exactly represent the target floating point
13019 format, which only happens when the target base equals two. */
13020 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13021 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13022 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13024 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13025 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13027 if (real_isfinite (ra0) && real_isfinite (ra1))
13029 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13030 const int prec = fmt->p;
13031 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13032 tree result_rem;
13033 long integer_quo;
13034 mpfr_t m0, m1;
13036 mpfr_inits2 (prec, m0, m1, NULL);
13037 mpfr_from_real (m0, ra0, GMP_RNDN);
13038 mpfr_from_real (m1, ra1, GMP_RNDN);
13039 mpfr_clear_flags ();
13040 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13041 /* Remquo is independent of the rounding mode, so pass
13042 inexact=0 to do_mpfr_ckconv(). */
13043 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13044 mpfr_clears (m0, m1, NULL);
13045 if (result_rem)
13047 /* MPFR calculates quo in the host's long, so it may
13048 return more bits in quo than the target int can hold
13049 if sizeof(host long) > sizeof(target int). This can
13050 happen even for native compilers in LP64 mode. In
13051 these cases, reduce the quo value modulo the largest
13052 value that the target int can hold, leaving one
13053 bit for the sign. */
13054 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13055 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13057 /* Dereference the quo pointer argument. */
13058 arg_quo = build_fold_indirect_ref (arg_quo);
13059 /* Proceed iff a valid pointer type was passed in. */
13060 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13062 /* Set the value. */
13063 tree result_quo
13064 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13065 build_int_cst (TREE_TYPE (arg_quo),
13066 integer_quo));
13067 TREE_SIDE_EFFECTS (result_quo) = 1;
13068 /* Combine the quo assignment with the rem. */
13069 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13070 result_quo, result_rem));
13075 return result;
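/* Worked example (illustrative, not part of builtins.c) of the quo
   truncation above.  On an LP64 host (64-bit long) targeting a 32-bit
   int, INT_TYPE_SIZE is 32, so a quotient that mpfr_remquo returned in
   the host long is reduced modulo 1UL << 31 before being stored in a
   target int; C99 only guarantees the low-order bits of remquo's
   quotient in any case.  Standalone, assumes nothing beyond the C
   library.  */

#include <stdio.h>

int
main (void)
{
  long integer_quo = 0x123456789L;     /* would not fit in a 32-bit int */
  const int int_type_size = 32;        /* stand-in for INT_TYPE_SIZE */

  integer_quo %= (long) (1UL << (int_type_size - 1));
  printf ("truncated quo = 0x%lx\n", integer_quo);   /* prints 0x23456789 */
  return 0;
}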
13078 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13079 resulting value as a tree with type TYPE. The mpfr precision is
13080 set to the precision of TYPE. We assume that this mpfr function
13081 returns zero if the result could be calculated exactly within the
13082 requested precision. In addition, the integer pointer represented
13083 by ARG_SG will be dereferenced and set to the appropriate signgam
13084 (-1,1) value. */
13086 static tree
13087 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13089 tree result = NULL_TREE;
13091 STRIP_NOPS (arg);
13093 /* To proceed, MPFR must exactly represent the target floating point
13094 format, which only happens when the target base equals two. Also
13095 verify ARG is a constant and that ARG_SG is an int pointer. */
13096 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13097 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13098 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13099 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13101 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13103 /* In addition to NaN and Inf, the argument cannot be zero or a
13104 negative integer. */
13105 if (real_isfinite (ra)
13106 && ra->cl != rvc_zero
13107 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13109 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13110 const int prec = fmt->p;
13111 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13112 int inexact, sg;
13113 mpfr_t m;
13114 tree result_lg;
13116 mpfr_init2 (m, prec);
13117 mpfr_from_real (m, ra, GMP_RNDN);
13118 mpfr_clear_flags ();
13119 inexact = mpfr_lgamma (m, &sg, m, rnd);
13120 result_lg = do_mpfr_ckconv (m, type, inexact);
13121 mpfr_clear (m);
13122 if (result_lg)
13124 tree result_sg;
13126 /* Dereference the arg_sg pointer argument. */
13127 arg_sg = build_fold_indirect_ref (arg_sg);
13128 /* Assign the signgam value into *arg_sg. */
13129 result_sg = fold_build2 (MODIFY_EXPR,
13130 TREE_TYPE (arg_sg), arg_sg,
13131 build_int_cst (TREE_TYPE (arg_sg), sg));
13132 TREE_SIDE_EFFECTS (result_sg) = 1;
13133 /* Combine the signgam assignment with the lgamma result. */
13134 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13135 result_sg, result_lg));
13140 return result;
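/* Standalone illustration (not part of builtins.c) of mpfr_lgamma, which
   returns log|gamma(x)| together with the sign of gamma(x) through a
   separate int, mirroring the lgamma_r signgam output that the folding
   above stores through ARG_SG.  Assumes only the public MPFR API; link
   with -lmpfr -lgmp.  */

#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t m;
  int sg, inexact;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, -2.5, GMP_RNDN);        /* gamma(-2.5) is negative */
  inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
  printf ("lgamma(-2.5) ~= %.17g  sign=%d  inexact=%d\n",
          mpfr_get_d (m, GMP_RNDN), sg, inexact);
  mpfr_clear (m);
  return 0;
}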
13143 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13144 function FUNC on it and return the resulting value as a tree with
13145 type TYPE. The mpfr precision is set to the precision of TYPE. We
13146 assume that function FUNC returns zero if the result could be
13147 calculated exactly within the requested precision. */
13149 static tree
13150 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13152 tree result = NULL_TREE;
13154 STRIP_NOPS (arg);
13156 /* To proceed, MPFR must exactly represent the target floating point
13157 format, which only happens when the target base equals two. */
13158 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13159 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13160 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13162 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13163 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13165 if (real_isfinite (re) && real_isfinite (im))
13167 const struct real_format *const fmt =
13168 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13169 const int prec = fmt->p;
13170 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13171 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13172 int inexact;
13173 mpc_t m;
13175 mpc_init2 (m, prec);
13176 mpfr_from_real (mpc_realref(m), re, rnd);
13177 mpfr_from_real (mpc_imagref(m), im, rnd);
13178 mpfr_clear_flags ();
13179 inexact = func (m, m, crnd);
13180 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13181 mpc_clear (m);
13185 return result;
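/* Standalone illustration (not part of builtins.c) of the MPC pattern used
   by do_mpc_arg1: a complex value is built from two MPFR reals, the
   function is evaluated at the target precision, and the single ternary
   return value encodes the rounding of both the real and imaginary parts.
   csqrt(-1 + 0i) = 0 + 1i is a convenient check.  Assumes only the public
   MPC/MPFR APIs; link with -lmpc -lmpfr -lgmp.  */

#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  mpc_t m;
  int inexact;

  mpc_init2 (m, 53);
  mpc_set_d_d (m, -1.0, 0.0, MPC_RNDNN);
  inexact = mpc_sqrt (m, m, MPC_RNDNN);
  printf ("csqrt(-1) ~= %.17g + %.17gi  inexact=%d\n",
          mpfr_get_d (mpc_realref (m), GMP_RNDN),
          mpfr_get_d (mpc_imagref (m), GMP_RNDN), inexact);
  mpc_clear (m);
  return 0;
}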
13188 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13189 mpc function FUNC on them and return the resulting value as a tree
13190 with type TYPE. The mpfr precision is set to the precision of
13191 TYPE. We assume that function FUNC returns zero if the result
13192 could be calculated exactly within the requested precision. If
13193 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13194 in the arguments and/or results. */
13196 tree
13197 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13198 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13200 tree result = NULL_TREE;
13202 STRIP_NOPS (arg0);
13203 STRIP_NOPS (arg1);
13205 /* To proceed, MPFR must exactly represent the target floating point
13206 format, which only happens when the target base equals two. */
13207 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13208 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13209 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13210 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13211 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13213 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13214 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13215 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13216 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13218 if (do_nonfinite
13219 || (real_isfinite (re0) && real_isfinite (im0)
13220 && real_isfinite (re1) && real_isfinite (im1)))
13222 const struct real_format *const fmt =
13223 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13224 const int prec = fmt->p;
13225 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13226 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13227 int inexact;
13228 mpc_t m0, m1;
13230 mpc_init2 (m0, prec);
13231 mpc_init2 (m1, prec);
13232 mpfr_from_real (mpc_realref(m0), re0, rnd);
13233 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13234 mpfr_from_real (mpc_realref(m1), re1, rnd);
13235 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13236 mpfr_clear_flags ();
13237 inexact = func (m0, m0, m1, crnd);
13238 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13239 mpc_clear (m0);
13240 mpc_clear (m1);
13244 return result;
13247 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13248 a normal call should be emitted rather than expanding the function
13249 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13251 static tree
13252 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13254 int nargs = gimple_call_num_args (stmt);
13256 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13257 (nargs > 0
13258 ? gimple_call_arg_ptr (stmt, 0)
13259 : &error_mark_node), fcode);
13262 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13263 a normal call should be emitted rather than expanding the function
13264 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13265 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13266 passed as the second argument. */
13268 tree
13269 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13270 enum built_in_function fcode)
13272 int nargs = gimple_call_num_args (stmt);
13274 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13275 (nargs > 0
13276 ? gimple_call_arg_ptr (stmt, 0)
13277 : &error_mark_node), maxlen, fcode);
13280 /* Builtins with folding operations that operate on "..." arguments
13281 need special handling; we need to store the arguments in a convenient
13282 data structure before attempting any folding. Fortunately there are
13283 only a few builtins that fall into this category. FNDECL is the
13284 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13285 result of the function call is ignored. */
13287 static tree
13288 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13289 bool ignore ATTRIBUTE_UNUSED)
13291 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13292 tree ret = NULL_TREE;
13294 switch (fcode)
13296 case BUILT_IN_SPRINTF_CHK:
13297 case BUILT_IN_VSPRINTF_CHK:
13298 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13299 break;
13301 case BUILT_IN_SNPRINTF_CHK:
13302 case BUILT_IN_VSNPRINTF_CHK:
13303 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13305 default:
13306 break;
13308 if (ret)
13310 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13311 TREE_NO_WARNING (ret) = 1;
13312 return ret;
13314 return NULL_TREE;
13317 /* A wrapper function for builtin folding that prevents warnings for
13318 "statement without effect" and the like, caused by removing the
13319 call node earlier than the warning is generated. */
13321 tree
13322 fold_call_stmt (gimple stmt, bool ignore)
13324 tree ret = NULL_TREE;
13325 tree fndecl = gimple_call_fndecl (stmt);
13326 location_t loc = gimple_location (stmt);
13327 if (fndecl
13328 && TREE_CODE (fndecl) == FUNCTION_DECL
13329 && DECL_BUILT_IN (fndecl)
13330 && !gimple_call_va_arg_pack_p (stmt))
13332 int nargs = gimple_call_num_args (stmt);
13333 tree *args = (nargs > 0
13334 ? gimple_call_arg_ptr (stmt, 0)
13335 : &error_mark_node);
13337 if (avoid_folding_inline_builtin (fndecl))
13338 return NULL_TREE;
13339 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13341 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13343 else
13345 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13346 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13347 if (!ret)
13348 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13349 if (ret)
13351 /* Propagate location information from original call to
13352 expansion of builtin. Otherwise things like
13353 maybe_emit_chk_warning, that operate on the expansion
13354 of a builtin, will use the wrong location information. */
13355 if (gimple_has_location (stmt))
13357 tree realret = ret;
13358 if (TREE_CODE (ret) == NOP_EXPR)
13359 realret = TREE_OPERAND (ret, 0);
13360 if (CAN_HAVE_LOCATION_P (realret)
13361 && !EXPR_HAS_LOCATION (realret))
13362 SET_EXPR_LOCATION (realret, loc);
13363 return realret;
13365 return ret;
13369 return NULL_TREE;
13372 /* Look up the function in built_in_decls that corresponds to DECL
13373 and set ASMSPEC as its user assembler name. DECL must be a
13374 function decl that declares a builtin. */
13376 void
13377 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13379 tree builtin;
13380 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13381 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13382 && asmspec != 0);
13384 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13385 set_user_assembler_name (builtin, asmspec);
13386 switch (DECL_FUNCTION_CODE (decl))
13388 case BUILT_IN_MEMCPY:
13389 init_block_move_fn (asmspec);
13390 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13391 break;
13392 case BUILT_IN_MEMSET:
13393 init_block_clear_fn (asmspec);
13394 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13395 break;
13396 case BUILT_IN_MEMMOVE:
13397 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13398 break;
13399 case BUILT_IN_MEMCMP:
13400 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13401 break;
13402 case BUILT_IN_ABORT:
13403 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13404 break;
13405 case BUILT_IN_FFS:
13406 if (INT_TYPE_SIZE < BITS_PER_WORD)
13408 set_user_assembler_libfunc ("ffs", asmspec);
13409 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13410 MODE_INT, 0), "ffs");
13412 break;
13413 default:
13414 break;
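/* Illustrative source-level usage (not part of builtins.c): redeclaring a
   library builtin with an asm label is what eventually reaches the
   function above.  The name __my_memcpy below is made up for the example;
   with such a declaration, explicit memcpy calls and the block-move
   library calls the compiler emits on its own are both directed at the
   renamed symbol (handled here for BUILT_IN_MEMCPY via init_block_move_fn
   and set_user_assembler_libfunc).  */

extern void *memcpy (void *, const void *, __SIZE_TYPE__) asm ("__my_memcpy");

void
copy_bytes (void *dst, const void *src, __SIZE_TYPE__ n)
{
  __builtin_memcpy (dst, src, n);   /* emitted library call targets __my_memcpy */
}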
13418 /* Return true if DECL is a builtin that expands to a constant or similarly
13419 simple code. */
13420 bool
13421 is_simple_builtin (tree decl)
13423 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13424 switch (DECL_FUNCTION_CODE (decl))
13426 /* Builtins that expand to constants. */
13427 case BUILT_IN_CONSTANT_P:
13428 case BUILT_IN_EXPECT:
13429 case BUILT_IN_OBJECT_SIZE:
13430 case BUILT_IN_UNREACHABLE:
13431 /* Simple register moves or loads from stack. */
13432 case BUILT_IN_RETURN_ADDRESS:
13433 case BUILT_IN_EXTRACT_RETURN_ADDR:
13434 case BUILT_IN_FROB_RETURN_ADDR:
13435 case BUILT_IN_RETURN:
13436 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13437 case BUILT_IN_FRAME_ADDRESS:
13438 case BUILT_IN_VA_END:
13439 case BUILT_IN_STACK_SAVE:
13440 case BUILT_IN_STACK_RESTORE:
13441 /* Exception state returns or moves registers around. */
13442 case BUILT_IN_EH_FILTER:
13443 case BUILT_IN_EH_POINTER:
13444 case BUILT_IN_EH_COPY_VALUES:
13445 return true;
13447 default:
13448 return false;
13451 return false;
13454 /* Return true if DECL is a builtin that is not expensive, i.e. one that
13455 will most probably be expanded inline into reasonably simple code.
13456 This is a superset of is_simple_builtin. */
13457 bool
13458 is_inexpensive_builtin (tree decl)
13460 if (!decl)
13461 return false;
13462 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13463 return true;
13464 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13465 switch (DECL_FUNCTION_CODE (decl))
13467 case BUILT_IN_ABS:
13468 case BUILT_IN_ALLOCA:
13469 case BUILT_IN_BSWAP32:
13470 case BUILT_IN_BSWAP64:
13471 case BUILT_IN_CLZ:
13472 case BUILT_IN_CLZIMAX:
13473 case BUILT_IN_CLZL:
13474 case BUILT_IN_CLZLL:
13475 case BUILT_IN_CTZ:
13476 case BUILT_IN_CTZIMAX:
13477 case BUILT_IN_CTZL:
13478 case BUILT_IN_CTZLL:
13479 case BUILT_IN_FFS:
13480 case BUILT_IN_FFSIMAX:
13481 case BUILT_IN_FFSL:
13482 case BUILT_IN_FFSLL:
13483 case BUILT_IN_IMAXABS:
13484 case BUILT_IN_FINITE:
13485 case BUILT_IN_FINITEF:
13486 case BUILT_IN_FINITEL:
13487 case BUILT_IN_FINITED32:
13488 case BUILT_IN_FINITED64:
13489 case BUILT_IN_FINITED128:
13490 case BUILT_IN_FPCLASSIFY:
13491 case BUILT_IN_ISFINITE:
13492 case BUILT_IN_ISINF_SIGN:
13493 case BUILT_IN_ISINF:
13494 case BUILT_IN_ISINFF:
13495 case BUILT_IN_ISINFL:
13496 case BUILT_IN_ISINFD32:
13497 case BUILT_IN_ISINFD64:
13498 case BUILT_IN_ISINFD128:
13499 case BUILT_IN_ISNAN:
13500 case BUILT_IN_ISNANF:
13501 case BUILT_IN_ISNANL:
13502 case BUILT_IN_ISNAND32:
13503 case BUILT_IN_ISNAND64:
13504 case BUILT_IN_ISNAND128:
13505 case BUILT_IN_ISNORMAL:
13506 case BUILT_IN_ISGREATER:
13507 case BUILT_IN_ISGREATEREQUAL:
13508 case BUILT_IN_ISLESS:
13509 case BUILT_IN_ISLESSEQUAL:
13510 case BUILT_IN_ISLESSGREATER:
13511 case BUILT_IN_ISUNORDERED:
13512 case BUILT_IN_VA_ARG_PACK:
13513 case BUILT_IN_VA_ARG_PACK_LEN:
13514 case BUILT_IN_VA_COPY:
13515 case BUILT_IN_TRAP:
13516 case BUILT_IN_SAVEREGS:
13517 case BUILT_IN_POPCOUNTL:
13518 case BUILT_IN_POPCOUNTLL:
13519 case BUILT_IN_POPCOUNTIMAX:
13520 case BUILT_IN_POPCOUNT:
13521 case BUILT_IN_PARITYL:
13522 case BUILT_IN_PARITYLL:
13523 case BUILT_IN_PARITYIMAX:
13524 case BUILT_IN_PARITY:
13525 case BUILT_IN_LABS:
13526 case BUILT_IN_LLABS:
13527 case BUILT_IN_PREFETCH:
13528 return true;
13530 default:
13531 return is_simple_builtin (decl);
13534 return false;