2011-04-23 Tobias Burnus <burnus@net-b.de>
[official-gcc.git] / gcc / builtins.c
blobb2534ce5850d678d81489dd6b1757546d9cf67c6
/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
74 #undef DEF_BUILTIN
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with __builtin_ or __sync_, i.e. it is
   the "internal" name of a builtin function.  */

bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
244 /* Return true if DECL is a function symbol representing a built-in. */
246 bool
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
257 static bool
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
267 /* Return the alignment in bits of EXP, an object.
268 Don't return more than MAX_ALIGN no matter what. */
270 unsigned int
271 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
273 HOST_WIDE_INT bitsize, bitpos;
274 tree offset;
275 enum machine_mode mode;
276 int unsignedp, volatilep;
277 unsigned int align, inner;
279 /* Get the innermost object and the constant (bitpos) and possibly
280 variable (offset) offset of the access. */
281 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
282 &mode, &unsignedp, &volatilep, true);
284 /* Extract alignment information from the innermost object and
285 possibly adjust bitpos and offset. */
286 if (TREE_CODE (exp) == CONST_DECL)
287 exp = DECL_INITIAL (exp);
288 if (DECL_P (exp)
289 && TREE_CODE (exp) != LABEL_DECL)
290 align = DECL_ALIGN (exp);
291 else if (CONSTANT_CLASS_P (exp))
293 align = TYPE_ALIGN (TREE_TYPE (exp));
294 #ifdef CONSTANT_ALIGNMENT
295 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
296 #endif
298 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
299 align = TYPE_ALIGN (TREE_TYPE (exp));
300 else if (TREE_CODE (exp) == INDIRECT_REF)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == MEM_REF)
304 tree addr = TREE_OPERAND (exp, 0);
305 struct ptr_info_def *pi;
306 if (TREE_CODE (addr) == BIT_AND_EXPR
307 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
309 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
310 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
311 align *= BITS_PER_UNIT;
312 addr = TREE_OPERAND (addr, 0);
314 else
315 align = BITS_PER_UNIT;
316 if (TREE_CODE (addr) == SSA_NAME
317 && (pi = SSA_NAME_PTR_INFO (addr)))
319 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
320 align = MAX (pi->align * BITS_PER_UNIT, align);
322 else if (TREE_CODE (addr) == ADDR_EXPR)
323 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
324 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
326 else if (TREE_CODE (exp) == TARGET_MEM_REF)
328 struct ptr_info_def *pi;
329 tree addr = TMR_BASE (exp);
330 if (TREE_CODE (addr) == BIT_AND_EXPR
331 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
333 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
334 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
335 align *= BITS_PER_UNIT;
336 addr = TREE_OPERAND (addr, 0);
338 else
339 align = BITS_PER_UNIT;
340 if (TREE_CODE (addr) == SSA_NAME
341 && (pi = SSA_NAME_PTR_INFO (addr)))
343 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
344 align = MAX (pi->align * BITS_PER_UNIT, align);
346 else if (TREE_CODE (addr) == ADDR_EXPR)
347 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
348 if (TMR_OFFSET (exp))
349 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
350 if (TMR_INDEX (exp) && TMR_STEP (exp))
352 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
353 align = MIN (align, (step & -step) * BITS_PER_UNIT);
355 else if (TMR_INDEX (exp))
356 align = BITS_PER_UNIT;
357 if (TMR_INDEX2 (exp))
358 align = BITS_PER_UNIT;
360 else
361 align = BITS_PER_UNIT;
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 inner = ~0U;
366 while (offset)
368 tree next_offset;
370 if (TREE_CODE (offset) == PLUS_EXPR)
372 next_offset = TREE_OPERAND (offset, 0);
373 offset = TREE_OPERAND (offset, 1);
375 else
376 next_offset = NULL;
377 if (host_integerp (offset, 1))
379 /* Any overflow in calculating offset_bits won't change
380 the alignment. */
381 unsigned offset_bits
382 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
384 if (offset_bits)
385 inner = MIN (inner, (offset_bits & -offset_bits));
387 else if (TREE_CODE (offset) == MULT_EXPR
388 && host_integerp (TREE_OPERAND (offset, 1), 1))
390 /* Any overflow in calculating offset_factor won't change
391 the alignment. */
392 unsigned offset_factor
393 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
394 * BITS_PER_UNIT);
396 if (offset_factor)
397 inner = MIN (inner, (offset_factor & -offset_factor));
399 else
401 inner = MIN (inner, BITS_PER_UNIT);
402 break;
404 offset = next_offset;
407 /* Alignment is innermost object alignment adjusted by the constant
408 and non-constant offset parts. */
409 align = MIN (align, inner);
410 bitpos = bitpos & (align - 1);
412 *bitposp = bitpos;
413 return align;
416 /* Return the alignment in bits of EXP, an object.
417 Don't return more than MAX_ALIGN no matter what. */
419 unsigned int
420 get_object_alignment (tree exp, unsigned int max_align)
422 unsigned HOST_WIDE_INT bitpos = 0;
423 unsigned int align;
425 align = get_object_alignment_1 (exp, &bitpos);
427 /* align and bitpos now specify known low bits of the pointer.
428 ptr & (align - 1) == bitpos. */
430 if (bitpos != 0)
431 align = (bitpos & -bitpos);
433 return MIN (align, max_align);
436 /* Returns true iff we can trust that alignment information has been
437 calculated properly. */
439 bool
440 can_trust_pointer_alignment (void)
442 /* We rely on TER to compute accurate alignment information. */
443 return (optimize && flag_tree_ter);
446 /* Return the alignment in bits of EXP, a pointer valued expression.
447 But don't return more than MAX_ALIGN no matter what.
448 The alignment returned is, by default, the alignment of the thing that
449 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
451 Otherwise, look at the expression to see if we can do better, i.e., if the
452 expression is actually pointing at an object whose alignment is tighter. */
454 unsigned int
455 get_pointer_alignment (tree exp, unsigned int max_align)
457 STRIP_NOPS (exp);
459 if (TREE_CODE (exp) == ADDR_EXPR)
460 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
461 else if (TREE_CODE (exp) == SSA_NAME
462 && POINTER_TYPE_P (TREE_TYPE (exp)))
464 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
465 unsigned align;
466 if (!pi)
467 return BITS_PER_UNIT;
468 if (pi->misalign != 0)
469 align = (pi->misalign & -pi->misalign);
470 else
471 align = pi->align;
472 return MIN (max_align, align * BITS_PER_UNIT);
475 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
478 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
479 way, because it could contain a zero byte in the middle.
480 TREE_STRING_LENGTH is the size of the character array, not the string.
482 ONLY_VALUE should be nonzero if the result is not going to be emitted
483 into the instruction stream and zero if it is going to be expanded.
484 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
485 is returned, otherwise NULL, since
486 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
487 evaluate the side-effects.
489 The value returned is of type `ssizetype'.
491 Unfortunately, string_constant can't access the values of const char
492 arrays with initializers, so neither can we do so here. */
494 tree
495 c_strlen (tree src, int only_value)
497 tree offset_node;
498 HOST_WIDE_INT offset;
499 int max;
500 const char *ptr;
501 location_t loc;
503 STRIP_NOPS (src);
504 if (TREE_CODE (src) == COND_EXPR
505 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
507 tree len1, len2;
509 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
510 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
511 if (tree_int_cst_equal (len1, len2))
512 return len1;
515 if (TREE_CODE (src) == COMPOUND_EXPR
516 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
517 return c_strlen (TREE_OPERAND (src, 1), only_value);
519 loc = EXPR_LOC_OR_HERE (src);
521 src = string_constant (src, &offset_node);
522 if (src == 0)
523 return NULL_TREE;
525 max = TREE_STRING_LENGTH (src) - 1;
526 ptr = TREE_STRING_POINTER (src);
528 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
530 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
531 compute the offset to the following null if we don't know where to
532 start searching for it. */
533 int i;
535 for (i = 0; i < max; i++)
536 if (ptr[i] == 0)
537 return NULL_TREE;
539 /* We don't know the starting offset, but we do know that the string
540 has no internal zero bytes. We can assume that the offset falls
541 within the bounds of the string; otherwise, the programmer deserves
542 what he gets. Subtract the offset from the length of the string,
543 and return that. This would perhaps not be valid if we were dealing
544 with named arrays in addition to literal string constants. */
546 return size_diffop_loc (loc, size_int (max), offset_node);
549 /* We have a known offset into the string. Start searching there for
550 a null character if we can represent it as a single HOST_WIDE_INT. */
551 if (offset_node == 0)
552 offset = 0;
553 else if (! host_integerp (offset_node, 0))
554 offset = -1;
555 else
556 offset = tree_low_cst (offset_node, 0);
558 /* If the offset is known to be out of bounds, warn, and call strlen at
559 runtime. */
560 if (offset < 0 || offset > max)
562 /* Suppress multiple warnings for propagated constant strings. */
563 if (! TREE_NO_WARNING (src))
565 warning_at (loc, 0, "offset outside bounds of constant string");
566 TREE_NO_WARNING (src) = 1;
568 return NULL_TREE;
571 /* Use strlen to search for the first zero byte. Since any strings
572 constructed with build_string will have nulls appended, we win even
573 if we get handed something like (char[4])"abcd".
575 Since OFFSET is our starting index into the string, no further
576 calculation is needed. */
577 return ssize_int (strlen (ptr + offset));
580 /* Return a char pointer for a C string if it is a string constant
581 or sum of string constant and integer constant. */
583 static const char *
584 c_getstr (tree src)
586 tree offset_node;
588 src = string_constant (src, &offset_node);
589 if (src == 0)
590 return 0;
592 if (offset_node == 0)
593 return TREE_STRING_POINTER (src);
594 else if (!host_integerp (offset_node, 1)
595 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
596 return 0;
598 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
601 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
602 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
604 static rtx
605 c_readstr (const char *str, enum machine_mode mode)
607 HOST_WIDE_INT c[2];
608 HOST_WIDE_INT ch;
609 unsigned int i, j;
611 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
613 c[0] = 0;
614 c[1] = 0;
615 ch = 1;
616 for (i = 0; i < GET_MODE_SIZE (mode); i++)
618 j = i;
619 if (WORDS_BIG_ENDIAN)
620 j = GET_MODE_SIZE (mode) - i - 1;
621 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
622 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
623 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
624 j *= BITS_PER_UNIT;
625 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
627 if (ch)
628 ch = (unsigned char) str[i];
629 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
631 return immed_double_const (c[0], c[1], mode);
634 /* Cast a target constant CST to target CHAR and if that value fits into
635 host char type, return zero and put that value into variable pointed to by
636 P. */
638 static int
639 target_char_cast (tree cst, char *p)
641 unsigned HOST_WIDE_INT val, hostval;
643 if (TREE_CODE (cst) != INTEGER_CST
644 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
645 return 1;
647 val = TREE_INT_CST_LOW (cst);
648 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
649 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
651 hostval = val;
652 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
653 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
655 if (val != hostval)
656 return 1;
658 *p = hostval;
659 return 0;
662 /* Similar to save_expr, but assumes that arbitrary code is not executed
663 in between the multiple evaluations. In particular, we assume that a
664 non-addressable local variable will not be modified. */
666 static tree
667 builtin_save_expr (tree exp)
669 if (TREE_CODE (exp) == SSA_NAME
670 || (TREE_ADDRESSABLE (exp) == 0
671 && (TREE_CODE (exp) == PARM_DECL
672 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
673 return exp;
675 return save_expr (exp);
678 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
679 times to get the address of either a higher stack frame, or a return
680 address located within it (depending on FNDECL_CODE). */
682 static rtx
683 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
685 int i;
687 #ifdef INITIAL_FRAME_ADDRESS_RTX
688 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
689 #else
690 rtx tem;
692 /* For a zero count with __builtin_return_address, we don't care what
693 frame address we return, because target-specific definitions will
694 override us. Therefore frame pointer elimination is OK, and using
695 the soft frame pointer is OK.
697 For a nonzero count, or a zero count with __builtin_frame_address,
698 we require a stable offset from the current frame pointer to the
699 previous one, so we must use the hard frame pointer, and
700 we must disable frame pointer elimination. */
701 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
702 tem = frame_pointer_rtx;
703 else
705 tem = hard_frame_pointer_rtx;
707 /* Tell reload not to eliminate the frame pointer. */
708 crtl->accesses_prior_frames = 1;
710 #endif
712 /* Some machines need special handling before we can access
713 arbitrary frames. For example, on the SPARC, we must first flush
714 all register windows to the stack. */
715 #ifdef SETUP_FRAME_ADDRESSES
716 if (count > 0)
717 SETUP_FRAME_ADDRESSES ();
718 #endif
720 /* On the SPARC, the return address is not in the frame, it is in a
721 register. There is no way to access it off of the current frame
722 pointer, but it can be accessed off the previous frame pointer by
723 reading the value from the register window save area. */
724 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
725 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
726 count--;
727 #endif
729 /* Scan back COUNT frames to the specified frame. */
730 for (i = 0; i < count; i++)
732 /* Assume the dynamic chain pointer is in the word that the
733 frame address points to, unless otherwise specified. */
734 #ifdef DYNAMIC_CHAIN_ADDRESS
735 tem = DYNAMIC_CHAIN_ADDRESS (tem);
736 #endif
737 tem = memory_address (Pmode, tem);
738 tem = gen_frame_mem (Pmode, tem);
739 tem = copy_to_reg (tem);
742 /* For __builtin_frame_address, return what we've got. But, on
743 the SPARC for example, we may have to add a bias. */
744 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
745 #ifdef FRAME_ADDR_RTX
746 return FRAME_ADDR_RTX (tem);
747 #else
748 return tem;
749 #endif
751 /* For __builtin_return_address, get the return address from that frame. */
752 #ifdef RETURN_ADDR_RTX
753 tem = RETURN_ADDR_RTX (count, tem);
754 #else
755 tem = memory_address (Pmode,
756 plus_constant (tem, GET_MODE_SIZE (Pmode)));
757 tem = gen_frame_mem (Pmode, tem);
758 #endif
759 return tem;
762 /* Alias set used for setjmp buffer. */
763 static alias_set_type setjmp_alias_set = -1;
765 /* Construct the leading half of a __builtin_setjmp call. Control will
766 return to RECEIVER_LABEL. This is also called directly by the SJLJ
767 exception handling code. */
769 void
770 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
772 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
773 rtx stack_save;
774 rtx mem;
776 if (setjmp_alias_set == -1)
777 setjmp_alias_set = new_alias_set ();
779 buf_addr = convert_memory_address (Pmode, buf_addr);
781 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
783 /* We store the frame pointer and the address of receiver_label in
784 the buffer and use the rest of it for the stack save area, which
785 is machine-dependent. */
787 mem = gen_rtx_MEM (Pmode, buf_addr);
788 set_mem_alias_set (mem, setjmp_alias_set);
789 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
791 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
792 set_mem_alias_set (mem, setjmp_alias_set);
794 emit_move_insn (validize_mem (mem),
795 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
797 stack_save = gen_rtx_MEM (sa_mode,
798 plus_constant (buf_addr,
799 2 * GET_MODE_SIZE (Pmode)));
800 set_mem_alias_set (stack_save, setjmp_alias_set);
801 emit_stack_save (SAVE_NONLOCAL, &stack_save);
803 /* If there is further processing to do, do it. */
804 #ifdef HAVE_builtin_setjmp_setup
805 if (HAVE_builtin_setjmp_setup)
806 emit_insn (gen_builtin_setjmp_setup (buf_addr));
807 #endif
809 /* Tell optimize_save_area_alloca that extra work is going to
810 need to go on during alloca. */
811 cfun->calls_setjmp = 1;
813 /* We have a nonlocal label. */
814 cfun->has_nonlocal_label = 1;
817 /* Construct the trailing part of a __builtin_setjmp call. This is
818 also called directly by the SJLJ exception handling code. */
820 void
821 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
823 rtx chain;
825 /* Clobber the FP when we get here, so we have to make sure it's
826 marked as used by this function. */
827 emit_use (hard_frame_pointer_rtx);
829 /* Mark the static chain as clobbered here so life information
830 doesn't get messed up for it. */
831 chain = targetm.calls.static_chain (current_function_decl, true);
832 if (chain && REG_P (chain))
833 emit_clobber (chain);
835 /* Now put in the code to restore the frame pointer, and argument
836 pointer, if needed. */
837 #ifdef HAVE_nonlocal_goto
838 if (! HAVE_nonlocal_goto)
839 #endif
841 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
842 /* This might change the hard frame pointer in ways that aren't
843 apparent to early optimization passes, so force a clobber. */
844 emit_clobber (hard_frame_pointer_rtx);
847 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
848 if (fixed_regs[ARG_POINTER_REGNUM])
850 #ifdef ELIMINABLE_REGS
851 size_t i;
852 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
854 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
855 if (elim_regs[i].from == ARG_POINTER_REGNUM
856 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
857 break;
859 if (i == ARRAY_SIZE (elim_regs))
860 #endif
862 /* Now restore our arg pointer from the address at which it
863 was saved in our stack frame. */
864 emit_move_insn (crtl->args.internal_arg_pointer,
865 copy_to_reg (get_arg_pointer_save_area ()));
868 #endif
870 #ifdef HAVE_builtin_setjmp_receiver
871 if (HAVE_builtin_setjmp_receiver)
872 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
873 else
874 #endif
875 #ifdef HAVE_nonlocal_goto_receiver
876 if (HAVE_nonlocal_goto_receiver)
877 emit_insn (gen_nonlocal_goto_receiver ());
878 else
879 #endif
880 { /* Nothing */ }
882 /* We must not allow the code we just generated to be reordered by
883 scheduling. Specifically, the update of the frame pointer must
884 happen immediately, not later. */
885 emit_insn (gen_blockage ());
888 /* __builtin_longjmp is passed a pointer to an array of five words (not
889 all will be used on all machines). It operates similarly to the C
890 library function of the same name, but is more efficient. Much of
891 the code below is copied from the handling of non-local gotos. */
/* Buffer layout used below (see offsets at original lines 922-927):
   word 0 = saved frame pointer, word 1 = jump label address,
   word 2 = saved stack pointer (in SA_MODE).  */
893 static void
894 expand_builtin_longjmp (rtx buf_addr, rtx value)
896 rtx fp, lab, stack, insn, last;
897 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
899 /* DRAP is needed for stack realign if longjmp is expanded to current
900 function */
901 if (SUPPORTS_STACK_ALIGNMENT)
902 crtl->need_drap = true;
904 if (setjmp_alias_set == -1)
905 setjmp_alias_set = new_alias_set ();
907 buf_addr = convert_memory_address (Pmode, buf_addr);
909 buf_addr = force_reg (Pmode, buf_addr);
911 /* We require that the user must pass a second argument of 1, because
912 that is what builtin_setjmp will return. */
913 gcc_assert (value == const1_rtx);
915 last = get_last_insn ();
916 #ifdef HAVE_builtin_longjmp
917 if (HAVE_builtin_longjmp)
918 emit_insn (gen_builtin_longjmp (buf_addr));
919 else
920 #endif
922 fp = gen_rtx_MEM (Pmode, buf_addr);
923 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
924 GET_MODE_SIZE (Pmode)));
926 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
927 2 * GET_MODE_SIZE (Pmode)));
928 set_mem_alias_set (fp, setjmp_alias_set);
929 set_mem_alias_set (lab, setjmp_alias_set);
930 set_mem_alias_set (stack, setjmp_alias_set);
932 /* Pick up FP, label, and SP from the block and jump. This code is
933 from expand_goto in stmt.c; see there for detailed comments. */
934 #ifdef HAVE_nonlocal_goto
935 if (HAVE_nonlocal_goto)
936 /* We have to pass a value to the nonlocal_goto pattern that will
937 get copied into the static_chain pointer, but it does not matter
938 what that value is, because builtin_setjmp does not use it. */
939 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
940 else
941 #endif
943 lab = copy_to_reg (lab);
/* Clobber all memory and the old frame pointer so the scheduler
   cannot move memory accesses across the frame switch below.  */
945 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
946 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
948 emit_move_insn (hard_frame_pointer_rtx, fp);
949 emit_stack_restore (SAVE_NONLOCAL, stack);
951 emit_use (hard_frame_pointer_rtx);
952 emit_use (stack_pointer_rtx);
953 emit_indirect_jump (lab);
957 /* Search backwards and mark the jump insn as a non-local goto.
958 Note that this precludes the use of __builtin_longjmp to a
959 __builtin_setjmp target in the same function. However, we've
960 already cautioned the user that these functions are for
961 internal exception handling use only. */
962 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
/* INSN must be found among the insns emitted above, i.e. after LAST.  */
964 gcc_assert (insn != last)
966 if (JUMP_P (insn))
968 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
969 break;
971 else if (CALL_P (insn))
972 break;
976 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
977 and the address of the save area. */
/* Save-area layout (see original lines 998-1000): word 0 = frame
   pointer, next word = stack pointer in STACK_SAVEAREA_MODE.
   Returns const0_rtx; control does not normally continue past the
   emitted indirect jump.  */
979 static rtx
980 expand_builtin_nonlocal_goto (tree exp)
982 tree t_label, t_save_area;
983 rtx r_label, r_save_area, r_fp, r_sp, insn;
985 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
986 return NULL_RTX;
988 t_label = CALL_EXPR_ARG (exp, 0);
989 t_save_area = CALL_EXPR_ARG (exp, 1);
991 r_label = expand_normal (t_label);
992 r_label = convert_memory_address (Pmode, r_label);
993 r_save_area = expand_normal (t_save_area);
994 r_save_area = convert_memory_address (Pmode, r_save_area);
995 /* Copy the address of the save location to a register just in case it was based
996 on the frame pointer. */
997 r_save_area = copy_to_reg (r_save_area);
998 r_fp = gen_rtx_MEM (Pmode, r_save_area);
999 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1000 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1002 crtl->has_nonlocal_goto = 1;
1004 #ifdef HAVE_nonlocal_goto
1005 /* ??? We no longer need to pass the static chain value, afaik. */
1006 if (HAVE_nonlocal_goto)
1007 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1008 else
1009 #endif
1011 r_label = copy_to_reg (r_label);
/* Clobber memory and the old frame pointer before switching frames,
   mirroring the fallback path in expand_builtin_longjmp.  */
1013 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1014 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1016 /* Restore frame pointer for containing function.
1017 This sets the actual hard register used for the frame pointer
1018 to the location of the function's incoming static chain info.
1019 The non-local goto handler will then adjust it to contain the
1020 proper value and reload the argument pointer, if needed. */
1021 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1022 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1024 /* USE of hard_frame_pointer_rtx added for consistency;
1025 not clear if really needed. */
1026 emit_use (hard_frame_pointer_rtx);
1027 emit_use (stack_pointer_rtx);
1029 /* If the architecture is using a GP register, we must
1030 conservatively assume that the target function makes use of it.
1031 The prologue of functions with nonlocal gotos must therefore
1032 initialize the GP register to the appropriate value, and we
1033 must then make sure that this value is live at the point
1034 of the jump. (Note that this doesn't necessarily apply
1035 to targets with a nonlocal_goto pattern; they are free
1036 to implement it in their own way. Note also that this is
1037 a no-op if the GP register is a global invariant.) */
1038 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1039 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1040 emit_use (pic_offset_table_rtx);
1042 emit_indirect_jump (r_label);
1045 /* Search backwards to the jump insn and mark it as a
1046 non-local goto. */
1047 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1049 if (JUMP_P (insn))
1051 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1052 break;
1054 else if (CALL_P (insn))
1055 break;
1058 return const0_rtx;
1061 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1062 (not all will be used on all machines) that was passed to __builtin_setjmp.
1063 It updates the stack pointer in that block to correspond to the current
1064 stack pointer. */
1066 static void
1067 expand_builtin_update_setjmp_buf (rtx buf_addr)
1069 enum machine_mode sa_mode = Pmode;
1070 rtx stack_save;
/* Pick the save-area mode.  Note the ordering: when both macros are
   defined, STACK_SAVEAREA_MODE deliberately overrides the mode taken
   from the save_stack_nonlocal insn.  */
1073 #ifdef HAVE_save_stack_nonlocal
1074 if (HAVE_save_stack_nonlocal)
1075 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1076 #endif
1077 #ifdef STACK_SAVEAREA_MODE
1078 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1079 #endif
/* The saved stack pointer lives two Pmode words into the buffer,
   matching the layout used by expand_builtin_longjmp.  */
1081 stack_save
1082 = gen_rtx_MEM (sa_mode,
1083 memory_address
1084 (sa_mode,
1085 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1087 #ifdef HAVE_setjmp
1088 if (HAVE_setjmp)
1089 emit_insn (gen_setjmp ());
1090 #endif
1092 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1095 /* Expand a call to __builtin_prefetch. For a target that does not support
1096 data prefetch, evaluate the memory address argument in case it has side
1097 effects. */
1099 static void
1100 expand_builtin_prefetch (tree exp)
1102 tree arg0, arg1, arg2;
1103 int nargs;
1104 rtx op0, op1, op2;
1106 if (!validate_arglist (exp, POINTER_TYPE, 0))
1107 return;
1109 arg0 = CALL_EXPR_ARG (exp, 0);
1111 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1112 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1113 locality). */
1114 nargs = call_expr_nargs (exp);
1115 if (nargs > 1)
1116 arg1 = CALL_EXPR_ARG (exp, 1);
1117 else
1118 arg1 = integer_zero_node;
1119 if (nargs > 2)
1120 arg2 = CALL_EXPR_ARG (exp, 2);
1121 else
1122 arg2 = integer_three_node;
1124 /* Argument 0 is an address. */
1125 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1127 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1128 if (TREE_CODE (arg1) != INTEGER_CST)
/* Non-constant flag: hard error, then recover with the default (0).  */
1130 error ("second argument to %<__builtin_prefetch%> must be a constant");
1131 arg1 = integer_zero_node;
1133 op1 = expand_normal (arg1);
1134 /* Argument 1 must be either zero or one. */
1135 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
/* Out-of-range constants only warn (not error) and fall back to 0.  */
1137 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1138 " using zero");
1139 op1 = const0_rtx;
1142 /* Argument 2 (locality) must be a compile-time constant int. */
1143 if (TREE_CODE (arg2) != INTEGER_CST)
1145 error ("third argument to %<__builtin_prefetch%> must be a constant");
1146 arg2 = integer_zero_node;
1148 op2 = expand_normal (arg2);
1149 /* Argument 2 must be 0, 1, 2, or 3. */
1150 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1152 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1153 op2 = const0_rtx;
1156 #ifdef HAVE_prefetch
1157 if (HAVE_prefetch)
1159 struct expand_operand ops[3];
1161 create_address_operand (&ops[0], op0);
1162 create_integer_operand (&ops[1], INTVAL (op1));
1163 create_integer_operand (&ops[2], INTVAL (op2));
1164 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1165 return;
1167 #endif
1169 /* Don't do anything with direct references to volatile memory, but
1170 generate code to handle other side effects. */
1171 if (!MEM_P (op0) && side_effects_p (op0))
1172 emit_insn (op0);
1175 /* Get a MEM rtx for expression EXP which is the address of an operand
1176 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1177 the maximum length of the block of memory that might be accessed or
1178 NULL if unknown. */
1180 static rtx
1181 get_memory_rtx (tree exp, tree len)
1183 tree orig_exp = exp;
1184 rtx addr, mem;
1185 HOST_WIDE_INT off;
1187 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1188 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1189 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1190 exp = TREE_OPERAND (exp, 0);
/* Note: the RTL is expanded from ORIG_EXP; the stripped EXP is used
   only to derive memory attributes below.  */
1192 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1193 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1195 /* Get an expression we can use to find the attributes to assign to MEM.
1196 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1197 we can. First remove any nops. */
1198 while (CONVERT_EXPR_P (exp)
1199 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1200 exp = TREE_OPERAND (exp, 0);
1202 off = 0;
1203 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1204 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1205 && host_integerp (TREE_OPERAND (exp, 1), 0)
1206 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1207 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1208 else if (TREE_CODE (exp) == ADDR_EXPR)
1209 exp = TREE_OPERAND (exp, 0);
1210 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1211 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1212 else
1213 exp = NULL;
1215 /* Honor attributes derived from exp, except for the alias set
1216 (as builtin stringops may alias with anything) and the size
1217 (as stringops may access multiple array elements). */
1218 if (exp)
1220 set_mem_attributes (mem, exp, 0);
1222 if (off)
1223 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1225 /* Allow the string and memory builtins to overflow from one
1226 field into another, see http://gcc.gnu.org/PR23561.
1227 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1228 memory accessed by the string or memory builtin will fit
1229 within the field. */
1230 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1232 tree mem_expr = MEM_EXPR (mem);
/* OFFSET/LENGTH of -1 mean "unknown" throughout this walk.  */
1233 HOST_WIDE_INT offset = -1, length = -1;
1234 tree inner = exp;
1236 while (TREE_CODE (inner) == ARRAY_REF
1237 || CONVERT_EXPR_P (inner)
1238 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1239 || TREE_CODE (inner) == SAVE_EXPR)
1240 inner = TREE_OPERAND (inner, 0);
1242 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1244 if (MEM_OFFSET (mem)
1245 && CONST_INT_P (MEM_OFFSET (mem)))
1246 offset = INTVAL (MEM_OFFSET (mem));
1248 if (offset >= 0 && len && host_integerp (len, 0))
1249 length = tree_low_cst (len, 0);
/* Walk outward through nested COMPONENT_REFs, accumulating the byte
   offset, until we find a field provably large enough to contain
   [offset, offset+length) -- or run out and drop the attributes.  */
1251 while (TREE_CODE (inner) == COMPONENT_REF)
1253 tree field = TREE_OPERAND (inner, 1);
1254 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1255 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1257 /* Bitfields are generally not byte-addressable. */
1258 gcc_assert (!DECL_BIT_FIELD (field)
1259 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1260 % BITS_PER_UNIT) == 0
1261 && host_integerp (DECL_SIZE (field), 0)
1262 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1263 % BITS_PER_UNIT) == 0));
1265 /* If we can prove that the memory starting at XEXP (mem, 0) and
1266 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1267 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1268 fields without DECL_SIZE_UNIT like flexible array members. */
1269 if (length >= 0
1270 && DECL_SIZE_UNIT (field)
1271 && host_integerp (DECL_SIZE_UNIT (field), 0))
1273 HOST_WIDE_INT size
1274 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1275 if (offset <= size
1276 && length <= size
1277 && offset + length <= size)
1278 break;
1281 if (offset >= 0
1282 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1283 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1284 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1285 / BITS_PER_UNIT;
1286 else
1288 offset = -1;
1289 length = -1;
1292 mem_expr = TREE_OPERAND (mem_expr, 0);
1293 inner = TREE_OPERAND (inner, 0);
1296 if (mem_expr == NULL)
1297 offset = -1;
1298 if (mem_expr != MEM_EXPR (mem))
1300 set_mem_expr (mem, mem_expr);
1301 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Unconditionally wipe the alias set and size: string/memory builtins
   may alias anything and may span multiple elements (see comment at
   original line 1215).  */
1304 set_mem_alias_set (mem, 0);
1305 set_mem_size (mem, NULL_RTX);
1308 return mem;
1311 /* Built-in functions to perform an untyped call and return. */
/* Convenience accessors into the per-target builtins state, so the
   rest of this file can use plain array names even when
   SWITCHABLE_TARGET routes them through this_target_builtins.  */
1313 #define apply_args_mode \
1314 (this_target_builtins->x_apply_args_mode)
1315 #define apply_result_mode \
1316 (this_target_builtins->x_apply_result_mode)
1318 /* Return the size required for the block returned by __builtin_apply_args,
1319 and initialize apply_args_mode. */
/* The result is computed once and cached in the function-local static
   SIZE; apply_args_mode[] is filled as a side effect of that first
   call (VOIDmode marks registers not used for argument passing).  */
1321 static int
1322 apply_args_size (void)
1324 static int size = -1;
1325 int align;
1326 unsigned int regno;
1327 enum machine_mode mode;
1329 /* The values computed by this function never change. */
1330 if (size < 0)
1332 /* The first value is the incoming arg-pointer. */
1333 size = GET_MODE_SIZE (Pmode);
1335 /* The second value is the structure value address unless this is
1336 passed as an "invisible" first argument. */
1337 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1338 size += GET_MODE_SIZE (Pmode);
1340 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1341 if (FUNCTION_ARG_REGNO_P (regno))
1343 mode = targetm.calls.get_raw_arg_mode (regno);
1345 gcc_assert (mode != VOIDmode);
/* Round SIZE up to each register's natural alignment before
   reserving its slot.  */
1347 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1348 if (size % align != 0)
1349 size = CEIL (size, align) * align;
1350 size += GET_MODE_SIZE (mode);
1351 apply_args_mode[regno] = mode;
1353 else
1355 apply_args_mode[regno] = VOIDmode;
1358 return size;
1361 /* Return the size required for the block returned by __builtin_apply,
1362 and initialize apply_result_mode. */
/* Cached like apply_args_size; apply_result_mode[] gets VOIDmode for
   registers that cannot hold a function return value.  */
1364 static int
1365 apply_result_size (void)
1367 static int size = -1;
1368 int align, regno;
1369 enum machine_mode mode;
1371 /* The values computed by this function never change. */
1372 if (size < 0)
1374 size = 0;
1376 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1377 if (targetm.calls.function_value_regno_p (regno))
1379 mode = targetm.calls.get_raw_result_mode (regno);
1381 gcc_assert (mode != VOIDmode);
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
1386 size += GET_MODE_SIZE (mode);
1387 apply_result_mode[regno] = mode;
1389 else
1390 apply_result_mode[regno] = VOIDmode;
1392 /* Allow targets that use untyped_call and untyped_return to override
1393 the size so that machine-specific information can be stored here. */
1394 #ifdef APPLY_RESULT_SIZE
1395 size = APPLY_RESULT_SIZE;
1396 #endif
1398 return size;
1401 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1402 /* Create a vector describing the result block RESULT. If SAVEP is true,
1403 the result block is used to save the values; otherwise it is used to
1404 restore the values. */
/* Returns a PARALLEL of SETs: register->memory when saving,
   memory->register when restoring.  Slot offsets follow the same
   alignment/rounding scheme as apply_result_size.  */
1406 static rtx
1407 result_vector (int savep, rtx result)
1409 int regno, size, align, nelts;
1410 enum machine_mode mode;
1411 rtx reg, mem;
1412 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1414 size = nelts = 0;
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1416 if ((mode = apply_result_mode[regno]) != VOIDmode)
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
/* When restoring, map each outgoing result register back to the
   corresponding incoming register number.  */
1421 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1422 mem = adjust_address (result, mode, size);
1423 savevec[nelts++] = (savep
1424 ? gen_rtx_SET (VOIDmode, mem, reg)
1425 : gen_rtx_SET (VOIDmode, reg, mem));
1426 size += GET_MODE_SIZE (mode);
1428 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1430 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1432 /* Save the state required to perform an untyped call with the same
1433 arguments as were passed to the current function. */
1435 static rtx
1436 expand_builtin_apply_args_1 (void)
1438 rtx registers, tem;
1439 int size, align, regno;
1440 enum machine_mode mode;
1441 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1443 /* Create a block where the arg-pointer, structure value address,
1444 and argument registers can be saved. */
1445 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1447 /* Walk past the arg-pointer and structure value address. */
1448 size = GET_MODE_SIZE (Pmode);
1449 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1450 size += GET_MODE_SIZE (Pmode);
1452 /* Save each register used in calling a function to the block. */
1453 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1454 if ((mode = apply_args_mode[regno]) != VOIDmode)
1456 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1457 if (size % align != 0)
1458 size = CEIL (size, align) * align;
1460 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1462 emit_move_insn (adjust_address (registers, mode, size), tem);
1463 size += GET_MODE_SIZE (mode);
1466 /* Save the arg pointer to the block. */
1467 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1468 #ifdef STACK_GROWS_DOWNWARD
1469 /* We need the pointer as the caller actually passed them to us, not
1470 as we might have pretended they were passed. Make sure it's a valid
1471 operand, as emit_move_insn isn't expected to handle a PLUS. */
/* NOTE(review): the extraction lost original line 1472 here -- the
   assignment target of the '=' on the next line (presumably TEM) is
   missing from this text; verify against the upstream source.  */
1473 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1474 NULL_RTX);
1475 #endif
1476 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1478 size = GET_MODE_SIZE (Pmode);
1480 /* Save the structure value address unless this is passed as an
1481 "invisible" first argument. */
1482 if (struct_incoming_value)
1484 emit_move_insn (adjust_address (registers, Pmode, size),
1485 copy_to_reg (struct_incoming_value));
1486 size += GET_MODE_SIZE (Pmode);
1489 /* Return the address of the block. */
1490 return copy_addr_to_reg (XEXP (registers, 0));
1493 /* __builtin_apply_args returns block of memory allocated on
1494 the stack into which is stored the arg pointer, structure
1495 value address, static chain, and all the registers that might
1496 possibly be used in performing a function call. The code is
1497 moved to the start of the function so the incoming values are
1498 saved. */
1500 static rtx
1501 expand_builtin_apply_args (void)
1503 /* Don't do __builtin_apply_args more than once in a function.
1504 Save the result of the first call and reuse it. */
1505 if (apply_args_value != 0)
1506 return apply_args_value;
1508 /* When this function is called, it means that registers must be
1509 saved on entry to this function. So we migrate the
1510 call to the first insn of this function. */
1511 rtx temp;
1512 rtx seq;
/* Expand the register-saving code into a detached sequence so it can
   be spliced in at function entry rather than at the call site.  */
1514 start_sequence ();
1515 temp = expand_builtin_apply_args_1 ();
1516 seq = get_insns ();
1517 end_sequence ();
1519 apply_args_value = temp;
1521 /* Put the insns after the NOTE that starts the function.
1522 If this is inside a start_sequence, make the outer-level insn
1523 chain current, so the code is placed at the start of the
1524 function. If internal_arg_pointer is a non-virtual pseudo,
1525 it needs to be placed after the function that initializes
1526 that pseudo. */
1527 push_topmost_sequence ();
1528 if (REG_P (crtl->args.internal_arg_pointer)
1529 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1530 emit_insn_before (seq, parm_birth_insn)
1531 else
1532 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1533 pop_topmost_sequence ();
1534 return temp;
1538 /* Perform an untyped call and save the state required to perform an
1539 untyped return of whatever value was returned by the given function. */
/* FUNCTION is the callee address, ARGUMENTS the block produced by
   __builtin_apply_args, ARGSIZE the number of bytes of arguments to
   copy.  Returns (in ptr_mode) the address of a stack block holding
   the callee's raw return registers.  */
1541 static rtx
1542 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1544 int size, align, regno;
1545 enum machine_mode mode;
1546 rtx incoming_args, result, reg, dest, src, call_insn;
1547 rtx old_stack_level = 0;
1548 rtx call_fusage = 0;
1549 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1551 arguments = convert_memory_address (Pmode, arguments);
1553 /* Create a block where the return registers can be saved. */
1554 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1556 /* Fetch the arg pointer from the ARGUMENTS block. */
1557 incoming_args = gen_reg_rtx (Pmode);
1558 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1559 #ifndef STACK_GROWS_DOWNWARD
1560 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1561 incoming_args, 0, OPTAB_LIB_WIDEN);
1562 #endif
1564 /* Push a new argument block and copy the arguments. Do not allow
1565 the (potential) memcpy call below to interfere with our stack
1566 manipulations. */
1567 do_pending_stack_adjust ();
1568 NO_DEFER_POP;
1570 /* Save the stack with nonlocal if available. */
1571 #ifdef HAVE_save_stack_nonlocal
1572 if (HAVE_save_stack_nonlocal)
1573 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1574 else
1575 #endif
1576 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1578 /* Allocate a block of memory onto the stack and copy the memory
1579 arguments to the outgoing arguments address. We can pass TRUE
1580 as the 4th argument because we just saved the stack pointer
1581 and will restore it right after the call. */
1582 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1584 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1585 may have already set current_function_calls_alloca to true.
1586 current_function_calls_alloca won't be set if argsize is zero,
1587 so we have to guarantee need_drap is true here. */
1588 if (SUPPORTS_STACK_ALIGNMENT)
1589 crtl->need_drap = true;
1591 dest = virtual_outgoing_args_rtx;
1592 #ifndef STACK_GROWS_DOWNWARD
1593 if (CONST_INT_P (argsize))
1594 dest = plus_constant (dest, -INTVAL (argsize));
1595 else
1596 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1597 #endif
1598 dest = gen_rtx_MEM (BLKmode, dest);
1599 set_mem_align (dest, PARM_BOUNDARY);
1600 src = gen_rtx_MEM (BLKmode, incoming_args);
1601 set_mem_align (src, PARM_BOUNDARY);
1602 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1604 /* Refer to the argument block. */
/* Called for its side effect of initializing apply_args_mode[].  */
1605 apply_args_size ();
1606 arguments = gen_rtx_MEM (BLKmode, arguments);
1607 set_mem_align (arguments, PARM_BOUNDARY);
1609 /* Walk past the arg-pointer and structure value address. */
1610 size = GET_MODE_SIZE (Pmode);
1611 if (struct_value)
1612 size += GET_MODE_SIZE (Pmode);
1614 /* Restore each of the registers previously saved. Make USE insns
1615 for each of these registers for use in making the call. */
1616 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1617 if ((mode = apply_args_mode[regno]) != VOIDmode)
1619 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1620 if (size % align != 0)
1621 size = CEIL (size, align) * align;
1622 reg = gen_rtx_REG (mode, regno);
1623 emit_move_insn (reg, adjust_address (arguments, mode, size));
1624 use_reg (&call_fusage, reg);
1625 size += GET_MODE_SIZE (mode);
1628 /* Restore the structure value address unless this is passed as an
1629 "invisible" first argument. */
1630 size = GET_MODE_SIZE (Pmode);
1631 if (struct_value)
1633 rtx value = gen_reg_rtx (Pmode);
1634 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1635 emit_move_insn (struct_value, value);
1636 if (REG_P (struct_value))
1637 use_reg (&call_fusage, struct_value);
1638 size += GET_MODE_SIZE (Pmode);
1641 /* All arguments and registers used for the call are set up by now! */
1642 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1644 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1645 and we don't want to load it into a register as an optimization,
1646 because prepare_call_address already did it if it should be done. */
1647 if (GET_CODE (function) != SYMBOL_REF)
1648 function = memory_address (FUNCTION_MODE, function);
1650 /* Generate the actual call instruction and save the return value. */
1651 #ifdef HAVE_untyped_call
1652 if (HAVE_untyped_call)
1653 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1654 result, result_vector (1, result)));
1655 else
1656 #endif
1657 #ifdef HAVE_call_value
1658 if (HAVE_call_value)
1660 rtx valreg = 0;
1662 /* Locate the unique return register. It is not possible to
1663 express a call that sets more than one return register using
1664 call_value; use untyped_call for that. In fact, untyped_call
1665 only needs to save the return registers in the given block. */
1666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1667 if ((mode = apply_result_mode[regno]) != VOIDmode)
1669 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1671 valreg = gen_rtx_REG (mode, regno);
1674 emit_call_insn (GEN_CALL_VALUE (valreg,
1675 gen_rtx_MEM (FUNCTION_MODE, function),
1676 const0_rtx, NULL_RTX, const0_rtx));
1678 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1680 else
1681 #endif
1682 gcc_unreachable ();
1684 /* Find the CALL insn we just emitted, and attach the register usage
1685 information. */
1686 call_insn = last_call_insn ();
1687 add_function_usage_to (call_insn, call_fusage);
1689 /* Restore the stack. */
1690 #ifdef HAVE_save_stack_nonlocal
1691 if (HAVE_save_stack_nonlocal)
1692 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1693 else
1694 #endif
1695 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1697 OK_DEFER_POP;
1699 /* Return the address of the result block. */
1700 result = copy_addr_to_reg (XEXP (result, 0));
1701 return convert_memory_address (ptr_mode, result);
1704 /* Perform an untyped return. */
/* RESULT is the (ptr_mode) address of a block written by
   __builtin_apply; reload the raw return registers from it and jump
   to the function epilogue.  */
1706 static void
1707 expand_builtin_return (rtx result)
1709 int size, align, regno;
1710 enum machine_mode mode;
1711 rtx reg;
1712 rtx call_fusage = 0;
1714 result = convert_memory_address (Pmode, result);
/* Called for its side effect of initializing apply_result_mode[].  */
1716 apply_result_size ();
1717 result = gen_rtx_MEM (BLKmode, result);
1719 #ifdef HAVE_untyped_return
1720 if (HAVE_untyped_return)
1722 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1723 emit_barrier ();
1724 return;
1726 #endif
1728 /* Restore the return value and note that each value is used. */
1729 size = 0;
1730 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1731 if ((mode = apply_result_mode[regno]) != VOIDmode)
1733 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1734 if (size % align != 0)
1735 size = CEIL (size, align) * align;
1736 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1737 emit_move_insn (reg, adjust_address (result, mode, size));
/* Collect USE insns in a side sequence so they can all be emitted
   together just before the return (original line 1747).  */
1739 push_to_sequence (call_fusage);
1740 emit_use (reg);
1741 call_fusage = get_insns ();
1742 end_sequence ();
1743 size += GET_MODE_SIZE (mode);
1746 /* Put the USE insns before the return. */
1747 emit_insn (call_fusage);
1749 /* Return whatever values was restored by jumping directly to the end
1750 of the function. */
1751 expand_naked_return ();
1754 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
/* Map a front-end type node onto the enum type_class value that
   __builtin_classify_type reports for it.  */
1756 static enum type_class
1757 type_to_class (tree type)
1759 switch (TREE_CODE (type))
1761 case VOID_TYPE: return void_type_class;
1762 case INTEGER_TYPE: return integer_type_class;
1763 case ENUMERAL_TYPE: return enumeral_type_class;
1764 case BOOLEAN_TYPE: return boolean_type_class;
1765 case POINTER_TYPE: return pointer_type_class;
1766 case REFERENCE_TYPE: return reference_type_class;
1767 case OFFSET_TYPE: return offset_type_class;
1768 case REAL_TYPE: return real_type_class;
1769 case COMPLEX_TYPE: return complex_type_class;
1770 case FUNCTION_TYPE: return function_type_class;
1771 case METHOD_TYPE: return method_type_class;
1772 case RECORD_TYPE: return record_type_class;
1773 case UNION_TYPE:
1774 case QUAL_UNION_TYPE: return union_type_class;
1775 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1776 ? string_type_class : array_type_class);
1777 case LANG_TYPE: return lang_type_class;
1778 default: return no_type_class;
1782 /* Expand a call EXP to __builtin_classify_type. */
/* With an argument, classify the argument's type; with no arguments,
   return no_type_class.  Result is an integer CONST_INT rtx.  */
1784 static rtx
1785 expand_builtin_classify_type (tree exp)
1787 if (call_expr_nargs (exp))
1788 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1789 return GEN_INT (no_type_class);
1792 /* This helper macro, meant to be used in mathfn_built_in below,
1793 determines which among a set of three builtin math functions is
1794 appropriate for a given type mode. The `F' and `L' cases are
1795 automatically generated from the `double' case. */
/* Each expansion emits three case labels (double/float/long double
   variants) and records the corresponding codes in the local
   FCODE/FCODEF/FCODEL variables of the enclosing switch.  */
1796 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1797 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1798 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1799 fcodel = BUILT_IN_MATHFN##L ; break;
1800 /* Similar to above, but appends _R after any F/L suffix. */
1801 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1802 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1803 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1804 fcodel = BUILT_IN_MATHFN##L_R ; break;
1806 /* Return mathematic function equivalent to FN but operating directly
1807 on TYPE, if available. If IMPLICIT is true find the function in
1808 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1809 can't do the conversion, return zero. */
1811 static tree
1812 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1814 tree const *const fn_arr
1815 = implicit ? implicit_built_in_decls : built_in_decls;
1816 enum built_in_function fcode, fcodef, fcodel;
/* The switch maps any of the three precision variants of FN to the
   triple (double, float, long double); unknown codes return NULL_TREE.  */
1818 switch (fn)
1820 CASE_MATHFN (BUILT_IN_ACOS)
1821 CASE_MATHFN (BUILT_IN_ACOSH)
1822 CASE_MATHFN (BUILT_IN_ASIN)
1823 CASE_MATHFN (BUILT_IN_ASINH)
1824 CASE_MATHFN (BUILT_IN_ATAN)
1825 CASE_MATHFN (BUILT_IN_ATAN2)
1826 CASE_MATHFN (BUILT_IN_ATANH)
1827 CASE_MATHFN (BUILT_IN_CBRT)
1828 CASE_MATHFN (BUILT_IN_CEIL)
1829 CASE_MATHFN (BUILT_IN_CEXPI)
1830 CASE_MATHFN (BUILT_IN_COPYSIGN)
1831 CASE_MATHFN (BUILT_IN_COS)
1832 CASE_MATHFN (BUILT_IN_COSH)
1833 CASE_MATHFN (BUILT_IN_DREM)
1834 CASE_MATHFN (BUILT_IN_ERF)
1835 CASE_MATHFN (BUILT_IN_ERFC)
1836 CASE_MATHFN (BUILT_IN_EXP)
1837 CASE_MATHFN (BUILT_IN_EXP10)
1838 CASE_MATHFN (BUILT_IN_EXP2)
1839 CASE_MATHFN (BUILT_IN_EXPM1)
1840 CASE_MATHFN (BUILT_IN_FABS)
1841 CASE_MATHFN (BUILT_IN_FDIM)
1842 CASE_MATHFN (BUILT_IN_FLOOR)
1843 CASE_MATHFN (BUILT_IN_FMA)
1844 CASE_MATHFN (BUILT_IN_FMAX)
1845 CASE_MATHFN (BUILT_IN_FMIN)
1846 CASE_MATHFN (BUILT_IN_FMOD)
1847 CASE_MATHFN (BUILT_IN_FREXP)
1848 CASE_MATHFN (BUILT_IN_GAMMA)
1849 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1850 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1851 CASE_MATHFN (BUILT_IN_HYPOT)
1852 CASE_MATHFN (BUILT_IN_ILOGB)
1853 CASE_MATHFN (BUILT_IN_INF)
1854 CASE_MATHFN (BUILT_IN_ISINF)
1855 CASE_MATHFN (BUILT_IN_J0)
1856 CASE_MATHFN (BUILT_IN_J1)
1857 CASE_MATHFN (BUILT_IN_JN)
1858 CASE_MATHFN (BUILT_IN_LCEIL)
1859 CASE_MATHFN (BUILT_IN_LDEXP)
1860 CASE_MATHFN (BUILT_IN_LFLOOR)
1861 CASE_MATHFN (BUILT_IN_LGAMMA)
1862 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1863 CASE_MATHFN (BUILT_IN_LLCEIL)
1864 CASE_MATHFN (BUILT_IN_LLFLOOR)
1865 CASE_MATHFN (BUILT_IN_LLRINT)
1866 CASE_MATHFN (BUILT_IN_LLROUND)
1867 CASE_MATHFN (BUILT_IN_LOG)
1868 CASE_MATHFN (BUILT_IN_LOG10)
1869 CASE_MATHFN (BUILT_IN_LOG1P)
1870 CASE_MATHFN (BUILT_IN_LOG2)
1871 CASE_MATHFN (BUILT_IN_LOGB)
1872 CASE_MATHFN (BUILT_IN_LRINT)
1873 CASE_MATHFN (BUILT_IN_LROUND)
1874 CASE_MATHFN (BUILT_IN_MODF)
1875 CASE_MATHFN (BUILT_IN_NAN)
1876 CASE_MATHFN (BUILT_IN_NANS)
1877 CASE_MATHFN (BUILT_IN_NEARBYINT)
1878 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1879 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1880 CASE_MATHFN (BUILT_IN_POW)
1881 CASE_MATHFN (BUILT_IN_POWI)
1882 CASE_MATHFN (BUILT_IN_POW10)
1883 CASE_MATHFN (BUILT_IN_REMAINDER)
1884 CASE_MATHFN (BUILT_IN_REMQUO)
1885 CASE_MATHFN (BUILT_IN_RINT)
1886 CASE_MATHFN (BUILT_IN_ROUND)
1887 CASE_MATHFN (BUILT_IN_SCALB)
1888 CASE_MATHFN (BUILT_IN_SCALBLN)
1889 CASE_MATHFN (BUILT_IN_SCALBN)
1890 CASE_MATHFN (BUILT_IN_SIGNBIT)
1891 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1892 CASE_MATHFN (BUILT_IN_SIN)
1893 CASE_MATHFN (BUILT_IN_SINCOS)
1894 CASE_MATHFN (BUILT_IN_SINH)
1895 CASE_MATHFN (BUILT_IN_SQRT)
1896 CASE_MATHFN (BUILT_IN_TAN)
1897 CASE_MATHFN (BUILT_IN_TANH)
1898 CASE_MATHFN (BUILT_IN_TGAMMA)
1899 CASE_MATHFN (BUILT_IN_TRUNC)
1900 CASE_MATHFN (BUILT_IN_Y0)
1901 CASE_MATHFN (BUILT_IN_Y1)
1902 CASE_MATHFN (BUILT_IN_YN)
1904 default:
1905 return NULL_TREE;
/* Select the variant matching TYPE's main variant; any other type
   (e.g. __float128 here) yields NULL_TREE.  */
1908 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1909 return fn_arr[fcode];
1910 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1911 return fn_arr[fcodef];
1912 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1913 return fn_arr[fcodel];
1914 else
1915 return NULL_TREE;
/* Like mathfn_built_in_1(), but always use the implicit array.
   TYPE selects the float/double/long double variant; FN is the
   generic built-in function code to map onto that type.  Returns
   NULL_TREE when no matching built-in decl exists.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     succeeds (and jumps to LAB, skipping the errno store) for any
     non-NaN value; only a NaN falls through.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.
     This is only safe for nothrow calls; otherwise the errno store
     could be skipped by an exception edge.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fall back to a plain word-mode store through the symbol
	 "errno" when the target provides no special errno rtx.  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  /* True if the expansion must also check for a NaN result and set
     errno to EDOM (see expand_errno_check).  */
  bool errno_set = false;
  tree arg;

  /* Punt unless the call has exactly one floating-point argument.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the built-in function code to the optab implementing it, and
     record whether the library function could set errno.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only raises a domain error for a negative argument, so
	 skip the errno check when the argument is provably >= 0.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* The errno check is only required when errno semantics are in
     effect and NaNs exist in this mode to flag the domain error.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.
     The errno check adds extra code, so avoid it when optimizing
     for size.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  /* Tree code class expected for the second argument; most of these
     builtins take two reals, but scalbn/scalbln/ldexp take an int.  */
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU to the default break.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* Map the built-in function code to the optab implementing it.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb as an insn only makes sense for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through... */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* An errno check only matters when errno semantics are requested
     and the mode has NaNs to signal the domain error with.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The errno check costs code size; punt when optimizing for size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  enum machine_mode mode;

  /* Punt unless the call has exactly three floating-point arguments.  */
  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so re-expansion on the library
     fallback path does not duplicate side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      target, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Both sin and cos are first tried via the combined sincos optab.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* sincos produces two values; request only the one this
	     builtin needs (sin -> second output, cos -> first).  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2376 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2377 return an RTL instruction code that implements the functionality.
2378 If that isn't possible or available return CODE_FOR_nothing. */
2380 static enum insn_code
2381 interclass_mathfn_icode (tree arg, tree fndecl)
2383 bool errno_set = false;
2384 optab builtin_optab = 0;
2385 enum machine_mode mode;
2387 switch (DECL_FUNCTION_CODE (fndecl))
2389 CASE_FLT_FN (BUILT_IN_ILOGB):
2390 errno_set = true; builtin_optab = ilogb_optab; break;
2391 CASE_FLT_FN (BUILT_IN_ISINF):
2392 builtin_optab = isinf_optab; break;
2393 case BUILT_IN_ISNORMAL:
2394 case BUILT_IN_ISFINITE:
2395 CASE_FLT_FN (BUILT_IN_FINITE):
2396 case BUILT_IN_FINITED32:
2397 case BUILT_IN_FINITED64:
2398 case BUILT_IN_FINITED128:
2399 case BUILT_IN_ISINFD32:
2400 case BUILT_IN_ISINFD64:
2401 case BUILT_IN_ISINFD128:
2402 /* These builtins have no optabs (yet). */
2403 break;
2404 default:
2405 gcc_unreachable ();
2408 /* There's no easy way to detect the case we need to set EDOM. */
2409 if (flag_errno_math && errno_set)
2410 return CODE_FOR_nothing;
2412 /* Optab mode depends on the mode of the input argument. */
2413 mode = TYPE_MODE (TREE_TYPE (arg));
2415 if (builtin_optab)
2416 return optab_handler (builtin_optab, mode);
2417 return CODE_FOR_nothing;
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      /* Remember the insn stream position so everything emitted here
	 can be rolled back if the insn cannot be generated.  */
      rtx last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more the once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* The output operand has the mode of the call's (integer)
	 result type, not of the floating-point argument.  */
      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Failed: discard the emitted insns and restore the original
	 (unsaved) argument before falling back to a normal call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  Always returns const0_rtx on success since sincos returns
   void; the results are stored through the two pointer arguments.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  /* sincos takes one real argument and two output pointers.  */
  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs for the two output locations *sinp and *cosp.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Three strategies are tried in
   order: the sincos optab, a call to sincos (when the target has it),
   and finally a call to cexp.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching this cexpi's precision.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Allocate stack temporaries that sincos will store through.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i): build the pure-imaginary argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex value with the cosine
     (op2) as real part and the sine (op1) as imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2634 /* Conveniently construct a function call expression. FNDECL names the
2635 function to be called, N is the number of arguments, and the "..."
2636 parameters are the argument expressions. Unlike build_call_exr
2637 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2639 static tree
2640 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2642 va_list ap;
2643 tree fntype = TREE_TYPE (fndecl);
2644 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2646 va_start (ap, n);
2647 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2648 va_end (ap);
2649 SET_EXPR_LOCATION (fn, loc);
2650 return fn;
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  /* These builtins are only ever generated with a valid argument
     list, so a failure here indicates a compiler bug.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Choose the conversion optab and the floating-point rounding
     function used if the optab expansion fails.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the library name matching this builtin's precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  /* These builtins are only ever generated with a valid argument
     list, so a failure here indicates a compiler bug.  */
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more the once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2923 /* Return the number of multiplications required to calculate
2924 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2925 subroutine of powi_cost. CACHE is an array indicating
2926 which exponents have already been calculated. */
2928 static int
2929 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2931 /* If we've already calculated this exponent, then this evaluation
2932 doesn't require any additional multiplications. */
2933 if (cache[n])
2934 return 0;
2936 cache[n] = true;
2937 return powi_lookup_cost (n - powi_table[n], cache)
2938 + powi_lookup_cost (powi_table[n], cache) + 1;
2941 /* Return the number of multiplications required to calculate
2942 powi(x,n) for an arbitrary x, given the exponent N. This
2943 function needs to be kept in sync with expand_powi below. */
2945 static int
2946 powi_cost (HOST_WIDE_INT n)
2948 bool cache[POWI_TABLE_SIZE];
2949 unsigned HOST_WIDE_INT digit;
2950 unsigned HOST_WIDE_INT val;
2951 int result;
2953 if (n == 0)
2954 return 0;
2956 /* Ignore the reciprocal when calculating the cost. */
2957 val = (n < 0) ? -n : n;
2959 /* Initialize the exponent cache. */
2960 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2961 cache[1] = true;
2963 result = 0;
2965 while (val >= POWI_TABLE_SIZE)
2967 if (val & 1)
2969 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2970 result += powi_lookup_cost (digit, cache)
2971 + POWI_WINDOW_SIZE + 1;
2972 val >>= POWI_WINDOW_SIZE;
2974 else
2976 val >>= 1;
2977 result++;
2981 return result + powi_lookup_cost (val, cache);
2984 /* Recursive subroutine of expand_powi. This function takes the array,
2985 CACHE, of already calculated exponents and an exponent N and returns
2986 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2988 static rtx
2989 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2991 unsigned HOST_WIDE_INT digit;
2992 rtx target, result;
2993 rtx op0, op1;
2995 if (n < POWI_TABLE_SIZE)
2997 if (cache[n])
2998 return cache[n];
3000 target = gen_reg_rtx (mode);
3001 cache[n] = target;
3003 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
3004 op1 = expand_powi_1 (mode, powi_table[n], cache);
3006 else if (n & 1)
3008 target = gen_reg_rtx (mode);
3009 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
3010 op0 = expand_powi_1 (mode, n - digit, cache);
3011 op1 = expand_powi_1 (mode, digit, cache);
3013 else
3015 target = gen_reg_rtx (mode);
3016 op0 = expand_powi_1 (mode, n >> 1, cache);
3017 op1 = op0;
3020 result = expand_mult (mode, op0, op1, target, 0);
3021 if (result != target)
3022 emit_move_insn (target, result);
3023 return target;
3026 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3027 floating point operand in mode MODE, and N is the exponent. This
3028 function needs to be kept in sync with powi_cost above. */
3030 static rtx
3031 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
3033 rtx cache[POWI_TABLE_SIZE];
3034 rtx result;
3036 if (n == 0)
3037 return CONST1_RTX (mode);
3039 memset (cache, 0, sizeof (cache));
3040 cache[1] = x;
3042 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3044 /* If the original exponent was negative, reciprocate the result. */
3045 if (n < 0)
3046 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3047 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3049 return result;
3052 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3053 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3054 if we can simplify it. */
3055 static rtx
3056 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
3057 rtx subtarget)
3059 if (TREE_CODE (arg1) == REAL_CST
3060 && !TREE_OVERFLOW (arg1)
3061 && flag_unsafe_math_optimizations)
3063 enum machine_mode mode = TYPE_MODE (type);
3064 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3065 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3066 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3067 tree op = NULL_TREE;
3069 if (sqrtfn)
3071 /* Optimize pow (x, 0.5) into sqrt. */
3072 if (REAL_VALUES_EQUAL (c, dconsthalf))
3073 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3075 /* Don't do this optimization if we don't have a sqrt insn. */
3076 else if (optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3078 REAL_VALUE_TYPE dconst1_4 = dconst1;
3079 REAL_VALUE_TYPE dconst3_4;
3080 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3082 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3083 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3085 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3086 machines that a builtin sqrt instruction is smaller than a
3087 call to pow with 0.25, so do this optimization even if
3088 -Os. */
3089 if (REAL_VALUES_EQUAL (c, dconst1_4))
3091 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3092 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3095 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3096 are optimizing for space. */
3097 else if (optimize_insn_for_speed_p ()
3098 && !TREE_SIDE_EFFECTS (arg0)
3099 && REAL_VALUES_EQUAL (c, dconst3_4))
3101 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3102 tree sqrt2 = builtin_save_expr (sqrt1);
3103 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3104 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3109 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3110 cbrt/sqrts instead of pow (x, 1./6.). */
3111 if (cbrtfn && ! op
3112 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3114 /* First try 1/3. */
3115 REAL_VALUE_TYPE dconst1_3
3116 = real_value_truncate (mode, dconst_third ());
3118 if (REAL_VALUES_EQUAL (c, dconst1_3))
3119 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3121 /* Now try 1/6. */
3122 else if (optimize_insn_for_speed_p ()
3123 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3125 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3126 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3128 if (REAL_VALUES_EQUAL (c, dconst1_6))
3130 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3131 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3136 if (op)
3137 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3140 return NULL_RTX;
3143 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3144 a normal call should be emitted rather than expanding the function
3145 in-line. EXP is the expression that is a call to the builtin
3146 function; if convenient, the result should be placed in TARGET. */
3148 static rtx
3149 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3151 tree arg0, arg1;
3152 tree fn, narg0;
3153 tree type = TREE_TYPE (exp);
3154 REAL_VALUE_TYPE cint, c, c2;
3155 HOST_WIDE_INT n;
3156 rtx op, op2;
3157 enum machine_mode mode = TYPE_MODE (type);
3159 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3160 return NULL_RTX;
3162 arg0 = CALL_EXPR_ARG (exp, 0);
3163 arg1 = CALL_EXPR_ARG (exp, 1);
3165 if (TREE_CODE (arg1) != REAL_CST
3166 || TREE_OVERFLOW (arg1))
3167 return expand_builtin_mathfn_2 (exp, target, subtarget);
3169 /* Handle constant exponents. */
3171 /* For integer valued exponents we can expand to an optimal multiplication
3172 sequence using expand_powi. */
3173 c = TREE_REAL_CST (arg1);
3174 n = real_to_integer (&c);
3175 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3176 if (real_identical (&c, &cint)
3177 && ((n >= -1 && n <= 2)
3178 || (flag_unsafe_math_optimizations
3179 && optimize_insn_for_speed_p ()
3180 && powi_cost (n) <= POWI_MAX_MULTS)))
3182 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3183 if (n != 1)
3185 op = force_reg (mode, op);
3186 op = expand_powi (op, mode, n);
3188 return op;
3191 narg0 = builtin_save_expr (arg0);
3193 /* If the exponent is not integer valued, check if it is half of an integer.
3194 In this case we can expand to sqrt (x) * x**(n/2). */
3195 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3196 if (fn != NULL_TREE)
3198 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3199 n = real_to_integer (&c2);
3200 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3201 if (real_identical (&c2, &cint)
3202 && ((flag_unsafe_math_optimizations
3203 && optimize_insn_for_speed_p ()
3204 && powi_cost (n/2) <= POWI_MAX_MULTS)
3205 /* Even the c == 0.5 case cannot be done unconditionally
3206 when we need to preserve signed zeros, as
3207 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3208 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3209 /* For c == 1.5 we can assume that x * sqrt (x) is always
3210 smaller than pow (x, 1.5) if sqrt will not be expanded
3211 as a call. */
3212 || (n == 3
3213 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3215 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3216 narg0);
3217 /* Use expand_expr in case the newly built call expression
3218 was folded to a non-call. */
3219 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3220 if (n != 1)
3222 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3223 op2 = force_reg (mode, op2);
3224 op2 = expand_powi (op2, mode, abs (n / 2));
3225 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3226 0, OPTAB_LIB_WIDEN);
3227 /* If the original exponent was negative, reciprocate the
3228 result. */
3229 if (n < 0)
3230 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3231 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3233 return op;
3237 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3238 call. */
3239 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3240 subtarget);
3241 if (op)
3242 return op;
3244 /* Try if the exponent is a third of an integer. In this case
3245 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3246 different from pow (x, 1./3.) due to rounding and behavior
3247 with negative x we need to constrain this transformation to
3248 unsafe math and positive x or finite math. */
3249 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3250 if (fn != NULL_TREE
3251 && flag_unsafe_math_optimizations
3252 && (tree_expr_nonnegative_p (arg0)
3253 || !HONOR_NANS (mode)))
3255 REAL_VALUE_TYPE dconst3;
3256 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3257 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3258 real_round (&c2, mode, &c2);
3259 n = real_to_integer (&c2);
3260 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3261 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3262 real_convert (&c2, mode, &c2);
3263 if (real_identical (&c2, &c)
3264 && ((optimize_insn_for_speed_p ()
3265 && powi_cost (n/3) <= POWI_MAX_MULTS)
3266 || n == 1))
3268 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3269 narg0);
3270 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3271 if (abs (n) % 3 == 2)
3272 op = expand_simple_binop (mode, MULT, op, op, op,
3273 0, OPTAB_LIB_WIDEN);
3274 if (n != 1)
3276 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3277 op2 = force_reg (mode, op2);
3278 op2 = expand_powi (op2, mode, abs (n / 3));
3279 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3280 0, OPTAB_LIB_WIDEN);
3281 /* If the original exponent was negative, reciprocate the
3282 result. */
3283 if (n < 0)
3284 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3285 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3287 return op;
3291 /* Fall back to optab expansion. */
3292 return expand_builtin_mathfn_2 (exp, target, subtarget);
3295 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3296 a normal call should be emitted rather than expanding the function
3297 in-line. EXP is the expression that is a call to the builtin
3298 function; if convenient, the result should be placed in TARGET. */
3300 static rtx
3301 expand_builtin_powi (tree exp, rtx target)
3303 tree arg0, arg1;
3304 rtx op0, op1;
3305 enum machine_mode mode;
3306 enum machine_mode mode2;
3308 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3309 return NULL_RTX;
3311 arg0 = CALL_EXPR_ARG (exp, 0);
3312 arg1 = CALL_EXPR_ARG (exp, 1);
3313 mode = TYPE_MODE (TREE_TYPE (exp));
3315 /* Handle constant power. */
3317 if (TREE_CODE (arg1) == INTEGER_CST
3318 && !TREE_OVERFLOW (arg1))
3320 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3322 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3323 Otherwise, check the number of multiplications required. */
3324 if ((TREE_INT_CST_HIGH (arg1) == 0
3325 || TREE_INT_CST_HIGH (arg1) == -1)
3326 && ((n >= -1 && n <= 2)
3327 || (optimize_insn_for_speed_p ()
3328 && powi_cost (n) <= POWI_MAX_MULTS)))
3330 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3331 op0 = force_reg (mode, op0);
3332 return expand_powi (op0, mode, n);
3336 /* Emit a libcall to libgcc. */
3338 /* Mode of the 2nd argument must match that of an int. */
3339 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3341 if (target == NULL_RTX)
3342 target = gen_reg_rtx (mode);
3344 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3345 if (GET_MODE (op0) != mode)
3346 op0 = convert_to_mode (mode, op0, 0);
3347 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3348 if (GET_MODE (op1) != mode2)
3349 op1 = convert_to_mode (mode2, op1, 0);
3351 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3352 target, LCT_CONST, mode, 2,
3353 op0, mode, op1, mode2);
3355 return target;
3358 /* Expand expression EXP which is a call to the strlen builtin. Return
3359 NULL_RTX if we failed the caller should emit a normal call, otherwise
3360 try to get the result in TARGET, if convenient. */
3362 static rtx
3363 expand_builtin_strlen (tree exp, rtx target,
3364 enum machine_mode target_mode)
3366 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3367 return NULL_RTX;
3368 else
3370 struct expand_operand ops[4];
3371 rtx pat;
3372 tree len;
3373 tree src = CALL_EXPR_ARG (exp, 0);
3374 rtx src_reg, before_strlen;
3375 enum machine_mode insn_mode = target_mode;
3376 enum insn_code icode = CODE_FOR_nothing;
3377 unsigned int align;
3379 /* If the length can be computed at compile-time, return it. */
3380 len = c_strlen (src, 0);
3381 if (len)
3382 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3384 /* If the length can be computed at compile-time and is constant
3385 integer, but there are side-effects in src, evaluate
3386 src for side-effects, then return len.
3387 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3388 can be optimized into: i++; x = 3; */
3389 len = c_strlen (src, 1);
3390 if (len && TREE_CODE (len) == INTEGER_CST)
3392 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3393 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3396 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3398 /* If SRC is not a pointer type, don't do this operation inline. */
3399 if (align == 0)
3400 return NULL_RTX;
3402 /* Bail out if we can't compute strlen in the right mode. */
3403 while (insn_mode != VOIDmode)
3405 icode = optab_handler (strlen_optab, insn_mode);
3406 if (icode != CODE_FOR_nothing)
3407 break;
3409 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3411 if (insn_mode == VOIDmode)
3412 return NULL_RTX;
3414 /* Make a place to hold the source address. We will not expand
3415 the actual source until we are sure that the expansion will
3416 not fail -- there are trees that cannot be expanded twice. */
3417 src_reg = gen_reg_rtx (Pmode);
3419 /* Mark the beginning of the strlen sequence so we can emit the
3420 source operand later. */
3421 before_strlen = get_last_insn ();
3423 create_output_operand (&ops[0], target, insn_mode);
3424 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3425 create_integer_operand (&ops[2], 0);
3426 create_integer_operand (&ops[3], align);
3427 if (!maybe_expand_insn (icode, 4, ops))
3428 return NULL_RTX;
3430 /* Now that we are assured of success, expand the source. */
3431 start_sequence ();
3432 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3433 if (pat != src_reg)
3434 emit_move_insn (src_reg, pat);
3435 pat = get_insns ();
3436 end_sequence ();
3438 if (before_strlen)
3439 emit_insn_after (pat, before_strlen);
3440 else
3441 emit_insn_before (pat, get_insns ());
3443 /* Return the value in the proper mode for this function. */
3444 if (GET_MODE (ops[0].value) == target_mode)
3445 target = ops[0].value;
3446 else if (target != 0)
3447 convert_move (target, ops[0].value, 0);
3448 else
3449 target = convert_to_mode (target_mode, ops[0].value, 0);
3451 return target;
3455 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3456 bytes from constant string DATA + OFFSET and return it as target
3457 constant. */
3459 static rtx
3460 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3461 enum machine_mode mode)
3463 const char *str = (const char *) data;
3465 gcc_assert (offset >= 0
3466 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3467 <= strlen (str) + 1));
3469 return c_readstr (str + offset, mode);
3472 /* Expand a call EXP to the memcpy builtin.
3473 Return NULL_RTX if we failed, the caller should emit a normal call,
3474 otherwise try to get the result in TARGET, if convenient (and in
3475 mode MODE if that's convenient). */
3477 static rtx
3478 expand_builtin_memcpy (tree exp, rtx target)
3480 if (!validate_arglist (exp,
3481 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3482 return NULL_RTX;
3483 else
3485 tree dest = CALL_EXPR_ARG (exp, 0);
3486 tree src = CALL_EXPR_ARG (exp, 1);
3487 tree len = CALL_EXPR_ARG (exp, 2);
3488 const char *src_str;
3489 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3490 unsigned int dest_align
3491 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3492 rtx dest_mem, src_mem, dest_addr, len_rtx;
3493 HOST_WIDE_INT expected_size = -1;
3494 unsigned int expected_align = 0;
3496 /* If DEST is not a pointer type, call the normal function. */
3497 if (dest_align == 0)
3498 return NULL_RTX;
3500 /* If either SRC is not a pointer type, don't do this
3501 operation in-line. */
3502 if (src_align == 0)
3503 return NULL_RTX;
3505 if (currently_expanding_gimple_stmt)
3506 stringop_block_profile (currently_expanding_gimple_stmt,
3507 &expected_align, &expected_size);
3509 if (expected_align < dest_align)
3510 expected_align = dest_align;
3511 dest_mem = get_memory_rtx (dest, len);
3512 set_mem_align (dest_mem, dest_align);
3513 len_rtx = expand_normal (len);
3514 src_str = c_getstr (src);
3516 /* If SRC is a string constant and block move would be done
3517 by pieces, we can avoid loading the string from memory
3518 and only stored the computed constants. */
3519 if (src_str
3520 && CONST_INT_P (len_rtx)
3521 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3522 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3523 CONST_CAST (char *, src_str),
3524 dest_align, false))
3526 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3527 builtin_memcpy_read_str,
3528 CONST_CAST (char *, src_str),
3529 dest_align, false, 0);
3530 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3531 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3532 return dest_mem;
3535 src_mem = get_memory_rtx (src, len);
3536 set_mem_align (src_mem, src_align);
3538 /* Copy word part most expediently. */
3539 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3540 CALL_EXPR_TAILCALL (exp)
3541 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3542 expected_align, expected_size);
3544 if (dest_addr == 0)
3546 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3547 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3549 return dest_addr;
3553 /* Expand a call EXP to the mempcpy builtin.
3554 Return NULL_RTX if we failed; the caller should emit a normal call,
3555 otherwise try to get the result in TARGET, if convenient (and in
3556 mode MODE if that's convenient). If ENDP is 0 return the
3557 destination pointer, if ENDP is 1 return the end pointer ala
3558 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3559 stpcpy. */
3561 static rtx
3562 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3564 if (!validate_arglist (exp,
3565 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3566 return NULL_RTX;
3567 else
3569 tree dest = CALL_EXPR_ARG (exp, 0);
3570 tree src = CALL_EXPR_ARG (exp, 1);
3571 tree len = CALL_EXPR_ARG (exp, 2);
3572 return expand_builtin_mempcpy_args (dest, src, len,
3573 target, mode, /*endp=*/ 1);
3577 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3578 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3579 so that this can also be called without constructing an actual CALL_EXPR.
3580 The other arguments and return value are the same as for
3581 expand_builtin_mempcpy. */
3583 static rtx
3584 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3585 rtx target, enum machine_mode mode, int endp)
3587 /* If return value is ignored, transform mempcpy into memcpy. */
3588 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3590 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3591 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3592 dest, src, len);
3593 return expand_expr (result, target, mode, EXPAND_NORMAL);
3595 else
3597 const char *src_str;
3598 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3599 unsigned int dest_align
3600 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3601 rtx dest_mem, src_mem, len_rtx;
3603 /* If either SRC or DEST is not a pointer type, don't do this
3604 operation in-line. */
3605 if (dest_align == 0 || src_align == 0)
3606 return NULL_RTX;
3608 /* If LEN is not constant, call the normal function. */
3609 if (! host_integerp (len, 1))
3610 return NULL_RTX;
3612 len_rtx = expand_normal (len);
3613 src_str = c_getstr (src);
3615 /* If SRC is a string constant and block move would be done
3616 by pieces, we can avoid loading the string from memory
3617 and only stored the computed constants. */
3618 if (src_str
3619 && CONST_INT_P (len_rtx)
3620 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3621 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3622 CONST_CAST (char *, src_str),
3623 dest_align, false))
3625 dest_mem = get_memory_rtx (dest, len);
3626 set_mem_align (dest_mem, dest_align);
3627 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3628 builtin_memcpy_read_str,
3629 CONST_CAST (char *, src_str),
3630 dest_align, false, endp);
3631 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3632 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3633 return dest_mem;
3636 if (CONST_INT_P (len_rtx)
3637 && can_move_by_pieces (INTVAL (len_rtx),
3638 MIN (dest_align, src_align)))
3640 dest_mem = get_memory_rtx (dest, len);
3641 set_mem_align (dest_mem, dest_align);
3642 src_mem = get_memory_rtx (src, len);
3643 set_mem_align (src_mem, src_align);
3644 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3645 MIN (dest_align, src_align), endp);
3646 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3647 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3648 return dest_mem;
3651 return NULL_RTX;
3655 #ifndef HAVE_movstr
3656 # define HAVE_movstr 0
3657 # define CODE_FOR_movstr CODE_FOR_nothing
3658 #endif
3660 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3661 we failed, the caller should emit a normal call, otherwise try to
3662 get the result in TARGET, if convenient. If ENDP is 0 return the
3663 destination pointer, if ENDP is 1 return the end pointer ala
3664 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3665 stpcpy. */
3667 static rtx
3668 expand_movstr (tree dest, tree src, rtx target, int endp)
3670 struct expand_operand ops[3];
3671 rtx dest_mem;
3672 rtx src_mem;
3674 if (!HAVE_movstr)
3675 return NULL_RTX;
3677 dest_mem = get_memory_rtx (dest, NULL);
3678 src_mem = get_memory_rtx (src, NULL);
3679 if (!endp)
3681 target = force_reg (Pmode, XEXP (dest_mem, 0));
3682 dest_mem = replace_equiv_address (dest_mem, target);
3685 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3686 create_fixed_operand (&ops[1], dest_mem);
3687 create_fixed_operand (&ops[2], src_mem);
3688 expand_insn (CODE_FOR_movstr, 3, ops);
3690 if (endp && target != const0_rtx)
3692 target = ops[0].value;
3693 /* movstr is supposed to set end to the address of the NUL
3694 terminator. If the caller requested a mempcpy-like return value,
3695 adjust it. */
3696 if (endp == 1)
3698 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3699 emit_move_insn (target, force_operand (tem, NULL_RTX));
3702 return target;
3705 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3706 NULL_RTX if we failed the caller should emit a normal call, otherwise
3707 try to get the result in TARGET, if convenient (and in mode MODE if that's
3708 convenient). */
3710 static rtx
3711 expand_builtin_strcpy (tree exp, rtx target)
3713 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3715 tree dest = CALL_EXPR_ARG (exp, 0);
3716 tree src = CALL_EXPR_ARG (exp, 1);
3717 return expand_builtin_strcpy_args (dest, src, target);
3719 return NULL_RTX;
3722 /* Helper function to do the actual work for expand_builtin_strcpy. The
3723 arguments to the builtin_strcpy call DEST and SRC are broken out
3724 so that this can also be called without constructing an actual CALL_EXPR.
3725 The other arguments and return value are the same as for
3726 expand_builtin_strcpy. */
3728 static rtx
3729 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3731 return expand_movstr (dest, src, target, /*endp=*/0);
3734 /* Expand a call EXP to the stpcpy builtin.
3735 Return NULL_RTX if we failed the caller should emit a normal call,
3736 otherwise try to get the result in TARGET, if convenient (and in
3737 mode MODE if that's convenient). */
3739 static rtx
3740 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3742 tree dst, src;
3743 location_t loc = EXPR_LOCATION (exp);
3745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3746 return NULL_RTX;
3748 dst = CALL_EXPR_ARG (exp, 0);
3749 src = CALL_EXPR_ARG (exp, 1);
3751 /* If return value is ignored, transform stpcpy into strcpy. */
3752 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3754 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3755 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3756 return expand_expr (result, target, mode, EXPAND_NORMAL);
3758 else
3760 tree len, lenp1;
3761 rtx ret;
3763 /* Ensure we get an actual string whose length can be evaluated at
3764 compile-time, not an expression containing a string. This is
3765 because the latter will potentially produce pessimized code
3766 when used to produce the return value. */
3767 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3768 return expand_movstr (dst, src, target, /*endp=*/2);
3770 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3771 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3772 target, mode, /*endp=*/2);
3774 if (ret)
3775 return ret;
3777 if (TREE_CODE (len) == INTEGER_CST)
3779 rtx len_rtx = expand_normal (len);
3781 if (CONST_INT_P (len_rtx))
3783 ret = expand_builtin_strcpy_args (dst, src, target);
3785 if (ret)
3787 if (! target)
3789 if (mode != VOIDmode)
3790 target = gen_reg_rtx (mode);
3791 else
3792 target = gen_reg_rtx (GET_MODE (ret));
3794 if (GET_MODE (target) != GET_MODE (ret))
3795 ret = gen_lowpart (GET_MODE (target), ret);
3797 ret = plus_constant (ret, INTVAL (len_rtx));
3798 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3799 gcc_assert (ret);
3801 return target;
3806 return expand_movstr (dst, src, target, /*endp=*/2);
3810 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3811 bytes from constant string DATA + OFFSET and return it as target
3812 constant. */
3815 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3816 enum machine_mode mode)
3818 const char *str = (const char *) data;
3820 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3821 return const0_rtx;
3823 return c_readstr (str + offset, mode);
3826 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3827 NULL_RTX if we failed the caller should emit a normal call. */
3829 static rtx
3830 expand_builtin_strncpy (tree exp, rtx target)
3832 location_t loc = EXPR_LOCATION (exp);
3834 if (validate_arglist (exp,
3835 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3837 tree dest = CALL_EXPR_ARG (exp, 0);
3838 tree src = CALL_EXPR_ARG (exp, 1);
3839 tree len = CALL_EXPR_ARG (exp, 2);
3840 tree slen = c_strlen (src, 1);
3842 /* We must be passed a constant len and src parameter. */
3843 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3844 return NULL_RTX;
3846 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3848 /* We're required to pad with trailing zeros if the requested
3849 len is greater than strlen(s2)+1. In that case try to
3850 use store_by_pieces, if it fails, punt. */
3851 if (tree_int_cst_lt (slen, len))
3853 unsigned int dest_align
3854 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3855 const char *p = c_getstr (src);
3856 rtx dest_mem;
3858 if (!p || dest_align == 0 || !host_integerp (len, 1)
3859 || !can_store_by_pieces (tree_low_cst (len, 1),
3860 builtin_strncpy_read_str,
3861 CONST_CAST (char *, p),
3862 dest_align, false))
3863 return NULL_RTX;
3865 dest_mem = get_memory_rtx (dest, len);
3866 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3867 builtin_strncpy_read_str,
3868 CONST_CAST (char *, p), dest_align, false, 0);
3869 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3870 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3871 return dest_mem;
3874 return NULL_RTX;
3877 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3878 bytes from constant string DATA + OFFSET and return it as target
3879 constant. */
3882 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3883 enum machine_mode mode)
3885 const char *c = (const char *) data;
3886 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3888 memset (p, *c, GET_MODE_SIZE (mode));
3890 return c_readstr (p, mode);
3893 /* Callback routine for store_by_pieces. Return the RTL of a register
3894 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3895 char value given in the RTL register data. For example, if mode is
3896 4 bytes wide, return the RTL for 0x01010101*data. */
3898 static rtx
3899 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3900 enum machine_mode mode)
3902 rtx target, coeff;
3903 size_t size;
3904 char *p;
3906 size = GET_MODE_SIZE (mode);
3907 if (size == 1)
3908 return (rtx) data;
3910 p = XALLOCAVEC (char, size);
3911 memset (p, 1, size);
3912 coeff = c_readstr (p, mode);
3914 target = convert_to_mode (mode, (rtx) data, 1);
3915 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3916 return force_reg (mode, target);
3919 /* Expand expression EXP, which is a call to the memset builtin. Return
3920 NULL_RTX if we failed the caller should emit a normal call, otherwise
3921 try to get the result in TARGET, if convenient (and in mode MODE if that's
3922 convenient). */
3924 static rtx
3925 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3927 if (!validate_arglist (exp,
3928 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3929 return NULL_RTX;
3930 else
3932 tree dest = CALL_EXPR_ARG (exp, 0);
3933 tree val = CALL_EXPR_ARG (exp, 1);
3934 tree len = CALL_EXPR_ARG (exp, 2);
3935 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3939 /* Helper function to do the actual work for expand_builtin_memset. The
3940 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3941 so that this can also be called without constructing an actual CALL_EXPR.
3942 The other arguments and return value are the same as for
3943 expand_builtin_memset. */
3945 static rtx
3946 expand_builtin_memset_args (tree dest, tree val, tree len,
3947 rtx target, enum machine_mode mode, tree orig_exp)
3949 tree fndecl, fn;
3950 enum built_in_function fcode;
3951 enum machine_mode val_mode;
3952 char c;
3953 unsigned int dest_align;
3954 rtx dest_mem, dest_addr, len_rtx;
3955 HOST_WIDE_INT expected_size = -1;
3956 unsigned int expected_align = 0;
3958 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3960 /* If DEST is not a pointer type, don't do this operation in-line. */
3961 if (dest_align == 0)
3962 return NULL_RTX;
3964 if (currently_expanding_gimple_stmt)
3965 stringop_block_profile (currently_expanding_gimple_stmt,
3966 &expected_align, &expected_size);
3968 if (expected_align < dest_align)
3969 expected_align = dest_align;
3971 /* If the LEN parameter is zero, return DEST. */
3972 if (integer_zerop (len))
3974 /* Evaluate and ignore VAL in case it has side-effects. */
3975 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3976 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3979 /* Stabilize the arguments in case we fail. */
3980 dest = builtin_save_expr (dest);
3981 val = builtin_save_expr (val);
3982 len = builtin_save_expr (len);
3984 len_rtx = expand_normal (len);
3985 dest_mem = get_memory_rtx (dest, len);
3986 val_mode = TYPE_MODE (unsigned_char_type_node);
3988 if (TREE_CODE (val) != INTEGER_CST)
3990 rtx val_rtx;
3992 val_rtx = expand_normal (val);
3993 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3995 /* Assume that we can memset by pieces if we can store
3996 * the coefficients by pieces (in the required modes).
3997 * We can't pass builtin_memset_gen_str as that emits RTL. */
3998 c = 1;
3999 if (host_integerp (len, 1)
4000 && can_store_by_pieces (tree_low_cst (len, 1),
4001 builtin_memset_read_str, &c, dest_align,
4002 true))
4004 val_rtx = force_reg (val_mode, val_rtx);
4005 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4006 builtin_memset_gen_str, val_rtx, dest_align,
4007 true, 0);
4009 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4010 dest_align, expected_align,
4011 expected_size))
4012 goto do_libcall;
4014 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4015 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4016 return dest_mem;
4019 if (target_char_cast (val, &c))
4020 goto do_libcall;
4022 if (c)
4024 if (host_integerp (len, 1)
4025 && can_store_by_pieces (tree_low_cst (len, 1),
4026 builtin_memset_read_str, &c, dest_align,
4027 true))
4028 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4029 builtin_memset_read_str, &c, dest_align, true, 0);
4030 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4031 gen_int_mode (c, val_mode),
4032 dest_align, expected_align,
4033 expected_size))
4034 goto do_libcall;
4036 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4037 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4038 return dest_mem;
4041 set_mem_align (dest_mem, dest_align);
4042 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4043 CALL_EXPR_TAILCALL (orig_exp)
4044 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4045 expected_align, expected_size);
4047 if (dest_addr == 0)
4049 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4050 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4053 return dest_addr;
4055 do_libcall:
4056 fndecl = get_callee_fndecl (orig_exp);
4057 fcode = DECL_FUNCTION_CODE (fndecl);
4058 if (fcode == BUILT_IN_MEMSET)
4059 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4060 dest, val, len);
4061 else if (fcode == BUILT_IN_BZERO)
4062 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4063 dest, len);
4064 else
4065 gcc_unreachable ();
4066 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4067 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4068 return expand_call (fn, target, target == const0_rtx);
4071 /* Expand expression EXP, which is a call to the bzero builtin. Return
4072 NULL_RTX if we failed the caller should emit a normal call. */
4074 static rtx
4075 expand_builtin_bzero (tree exp)
4077 tree dest, size;
4078 location_t loc = EXPR_LOCATION (exp);
4080 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4081 return NULL_RTX;
4083 dest = CALL_EXPR_ARG (exp, 0);
4084 size = CALL_EXPR_ARG (exp, 1);
4086 /* New argument list transforming bzero(ptr x, int y) to
4087 memset(ptr x, int 0, size_t y). This is done this way
4088 so that if it isn't expanded inline, we fallback to
4089 calling bzero instead of memset. */
4091 return expand_builtin_memset_args (dest, integer_zero_node,
4092 fold_convert_loc (loc, sizetype, size),
4093 const0_rtx, VOIDmode, exp);
4096 /* Expand expression EXP, which is a call to the memcmp built-in function.
4097 Return NULL_RTX if we failed and the
4098 caller should emit a normal call, otherwise try to get the result in
4099 TARGET, if convenient (and in mode MODE, if that's convenient). */
4101 static rtx
4102 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4103 ATTRIBUTE_UNUSED enum machine_mode mode)
4105 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4107 if (!validate_arglist (exp,
4108 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4109 return NULL_RTX;
4111 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4113 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4114 rtx result;
4115 rtx insn;
4116 tree arg1 = CALL_EXPR_ARG (exp, 0);
4117 tree arg2 = CALL_EXPR_ARG (exp, 1);
4118 tree len = CALL_EXPR_ARG (exp, 2);
4120 unsigned int arg1_align
4121 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4122 unsigned int arg2_align
4123 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4124 enum machine_mode insn_mode;
4126 #ifdef HAVE_cmpmemsi
4127 if (HAVE_cmpmemsi)
4128 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4129 else
4130 #endif
4131 #ifdef HAVE_cmpstrnsi
4132 if (HAVE_cmpstrnsi)
4133 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4134 else
4135 #endif
4136 return NULL_RTX;
4138 /* If we don't have POINTER_TYPE, call the function. */
4139 if (arg1_align == 0 || arg2_align == 0)
4140 return NULL_RTX;
4142 /* Make a place to write the result of the instruction. */
4143 result = target;
4144 if (! (result != 0
4145 && REG_P (result) && GET_MODE (result) == insn_mode
4146 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4147 result = gen_reg_rtx (insn_mode);
4149 arg1_rtx = get_memory_rtx (arg1, len);
4150 arg2_rtx = get_memory_rtx (arg2, len);
4151 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4153 /* Set MEM_SIZE as appropriate. */
4154 if (CONST_INT_P (arg3_rtx))
4156 set_mem_size (arg1_rtx, arg3_rtx);
4157 set_mem_size (arg2_rtx, arg3_rtx);
4160 #ifdef HAVE_cmpmemsi
4161 if (HAVE_cmpmemsi)
4162 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4163 GEN_INT (MIN (arg1_align, arg2_align)));
4164 else
4165 #endif
4166 #ifdef HAVE_cmpstrnsi
4167 if (HAVE_cmpstrnsi)
4168 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4169 GEN_INT (MIN (arg1_align, arg2_align)));
4170 else
4171 #endif
4172 gcc_unreachable ();
4174 if (insn)
4175 emit_insn (insn);
4176 else
4177 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4178 TYPE_MODE (integer_type_node), 3,
4179 XEXP (arg1_rtx, 0), Pmode,
4180 XEXP (arg2_rtx, 0), Pmode,
4181 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4182 TYPE_UNSIGNED (sizetype)),
4183 TYPE_MODE (sizetype));
4185 /* Return the value in the proper mode for this function. */
4186 mode = TYPE_MODE (TREE_TYPE (exp));
4187 if (GET_MODE (result) == mode)
4188 return result;
4189 else if (target != 0)
4191 convert_move (target, result, 0);
4192 return target;
4194 else
4195 return convert_to_mode (mode, result, 0);
4197 #endif
4199 return NULL_RTX;
4202 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4203 if we failed the caller should emit a normal call, otherwise try to get
4204 the result in TARGET, if convenient. */
4206 static rtx
4207 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4209 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4210 return NULL_RTX;
4212 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4213 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4214 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4216 rtx arg1_rtx, arg2_rtx;
4217 rtx result, insn = NULL_RTX;
4218 tree fndecl, fn;
4219 tree arg1 = CALL_EXPR_ARG (exp, 0);
4220 tree arg2 = CALL_EXPR_ARG (exp, 1);
4222 unsigned int arg1_align
4223 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4224 unsigned int arg2_align
4225 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4227 /* If we don't have POINTER_TYPE, call the function. */
4228 if (arg1_align == 0 || arg2_align == 0)
4229 return NULL_RTX;
4231 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4232 arg1 = builtin_save_expr (arg1);
4233 arg2 = builtin_save_expr (arg2);
4235 arg1_rtx = get_memory_rtx (arg1, NULL);
4236 arg2_rtx = get_memory_rtx (arg2, NULL);
4238 #ifdef HAVE_cmpstrsi
4239 /* Try to call cmpstrsi. */
4240 if (HAVE_cmpstrsi)
4242 enum machine_mode insn_mode
4243 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4245 /* Make a place to write the result of the instruction. */
4246 result = target;
4247 if (! (result != 0
4248 && REG_P (result) && GET_MODE (result) == insn_mode
4249 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4250 result = gen_reg_rtx (insn_mode);
4252 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4253 GEN_INT (MIN (arg1_align, arg2_align)));
4255 #endif
4256 #ifdef HAVE_cmpstrnsi
4257 /* Try to determine at least one length and call cmpstrnsi. */
4258 if (!insn && HAVE_cmpstrnsi)
4260 tree len;
4261 rtx arg3_rtx;
4263 enum machine_mode insn_mode
4264 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4265 tree len1 = c_strlen (arg1, 1);
4266 tree len2 = c_strlen (arg2, 1);
4268 if (len1)
4269 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4270 if (len2)
4271 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4273 /* If we don't have a constant length for the first, use the length
4274 of the second, if we know it. We don't require a constant for
4275 this case; some cost analysis could be done if both are available
4276 but neither is constant. For now, assume they're equally cheap,
4277 unless one has side effects. If both strings have constant lengths,
4278 use the smaller. */
4280 if (!len1)
4281 len = len2;
4282 else if (!len2)
4283 len = len1;
4284 else if (TREE_SIDE_EFFECTS (len1))
4285 len = len2;
4286 else if (TREE_SIDE_EFFECTS (len2))
4287 len = len1;
4288 else if (TREE_CODE (len1) != INTEGER_CST)
4289 len = len2;
4290 else if (TREE_CODE (len2) != INTEGER_CST)
4291 len = len1;
4292 else if (tree_int_cst_lt (len1, len2))
4293 len = len1;
4294 else
4295 len = len2;
4297 /* If both arguments have side effects, we cannot optimize. */
4298 if (!len || TREE_SIDE_EFFECTS (len))
4299 goto do_libcall;
4301 arg3_rtx = expand_normal (len);
4303 /* Make a place to write the result of the instruction. */
4304 result = target;
4305 if (! (result != 0
4306 && REG_P (result) && GET_MODE (result) == insn_mode
4307 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4308 result = gen_reg_rtx (insn_mode);
4310 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4311 GEN_INT (MIN (arg1_align, arg2_align)));
4313 #endif
4315 if (insn)
4317 enum machine_mode mode;
4318 emit_insn (insn);
4320 /* Return the value in the proper mode for this function. */
4321 mode = TYPE_MODE (TREE_TYPE (exp));
4322 if (GET_MODE (result) == mode)
4323 return result;
4324 if (target == 0)
4325 return convert_to_mode (mode, result, 0);
4326 convert_move (target, result, 0);
4327 return target;
4330 /* Expand the library call ourselves using a stabilized argument
4331 list to avoid re-evaluating the function's arguments twice. */
4332 #ifdef HAVE_cmpstrnsi
4333 do_libcall:
4334 #endif
4335 fndecl = get_callee_fndecl (exp);
4336 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4337 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4338 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4339 return expand_call (fn, target, target == const0_rtx);
4341 #endif
4342 return NULL_RTX;
4345 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4346 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4347 the result in TARGET, if convenient. */
4349 static rtx
4350 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4351 ATTRIBUTE_UNUSED enum machine_mode mode)
4353 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4355 if (!validate_arglist (exp,
4356 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4357 return NULL_RTX;
4359 /* If c_strlen can determine an expression for one of the string
4360 lengths, and it doesn't have side effects, then emit cmpstrnsi
4361 using length MIN(strlen(string)+1, arg3). */
4362 #ifdef HAVE_cmpstrnsi
4363 if (HAVE_cmpstrnsi)
4365 tree len, len1, len2;
4366 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4367 rtx result, insn;
4368 tree fndecl, fn;
4369 tree arg1 = CALL_EXPR_ARG (exp, 0);
4370 tree arg2 = CALL_EXPR_ARG (exp, 1);
4371 tree arg3 = CALL_EXPR_ARG (exp, 2);
4373 unsigned int arg1_align
4374 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4375 unsigned int arg2_align
4376 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4377 enum machine_mode insn_mode
4378 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4380 len1 = c_strlen (arg1, 1);
4381 len2 = c_strlen (arg2, 1);
4383 if (len1)
4384 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4385 if (len2)
4386 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4388 /* If we don't have a constant length for the first, use the length
4389 of the second, if we know it. We don't require a constant for
4390 this case; some cost analysis could be done if both are available
4391 but neither is constant. For now, assume they're equally cheap,
4392 unless one has side effects. If both strings have constant lengths,
4393 use the smaller. */
4395 if (!len1)
4396 len = len2;
4397 else if (!len2)
4398 len = len1;
4399 else if (TREE_SIDE_EFFECTS (len1))
4400 len = len2;
4401 else if (TREE_SIDE_EFFECTS (len2))
4402 len = len1;
4403 else if (TREE_CODE (len1) != INTEGER_CST)
4404 len = len2;
4405 else if (TREE_CODE (len2) != INTEGER_CST)
4406 len = len1;
4407 else if (tree_int_cst_lt (len1, len2))
4408 len = len1;
4409 else
4410 len = len2;
4412 /* If both arguments have side effects, we cannot optimize. */
4413 if (!len || TREE_SIDE_EFFECTS (len))
4414 return NULL_RTX;
4416 /* The actual new length parameter is MIN(len,arg3). */
4417 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4418 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4420 /* If we don't have POINTER_TYPE, call the function. */
4421 if (arg1_align == 0 || arg2_align == 0)
4422 return NULL_RTX;
4424 /* Make a place to write the result of the instruction. */
4425 result = target;
4426 if (! (result != 0
4427 && REG_P (result) && GET_MODE (result) == insn_mode
4428 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4429 result = gen_reg_rtx (insn_mode);
4431 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4432 arg1 = builtin_save_expr (arg1);
4433 arg2 = builtin_save_expr (arg2);
4434 len = builtin_save_expr (len);
4436 arg1_rtx = get_memory_rtx (arg1, len);
4437 arg2_rtx = get_memory_rtx (arg2, len);
4438 arg3_rtx = expand_normal (len);
4439 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4440 GEN_INT (MIN (arg1_align, arg2_align)));
4441 if (insn)
4443 emit_insn (insn);
4445 /* Return the value in the proper mode for this function. */
4446 mode = TYPE_MODE (TREE_TYPE (exp));
4447 if (GET_MODE (result) == mode)
4448 return result;
4449 if (target == 0)
4450 return convert_to_mode (mode, result, 0);
4451 convert_move (target, result, 0);
4452 return target;
4455 /* Expand the library call ourselves using a stabilized argument
4456 list to avoid re-evaluating the function's arguments twice. */
4457 fndecl = get_callee_fndecl (exp);
4458 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4459 arg1, arg2, len);
4460 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4461 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4462 return expand_call (fn, target, target == const0_rtx);
4464 #endif
4465 return NULL_RTX;
4468 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4469 if that's convenient. */
4472 expand_builtin_saveregs (void)
4474 rtx val, seq;
4476 /* Don't do __builtin_saveregs more than once in a function.
4477 Save the result of the first call and reuse it. */
4478 if (saveregs_value != 0)
4479 return saveregs_value;
4481 /* When this function is called, it means that registers must be
4482 saved on entry to this function. So we migrate the call to the
4483 first insn of this function. */
4485 start_sequence ();
4487 /* Do whatever the machine needs done in this case. */
4488 val = targetm.calls.expand_builtin_saveregs ();
4490 seq = get_insns ();
4491 end_sequence ();
4493 saveregs_value = val;
4495 /* Put the insns after the NOTE that starts the function. If this
4496 is inside a start_sequence, make the outer-level insn chain current, so
4497 the code is placed at the start of the function. */
4498 push_topmost_sequence ();
4499 emit_insn_after (seq, entry_of_function ());
4500 pop_topmost_sequence ();
4502 return val;
4505 /* Expand a call to __builtin_next_arg. */
4507 static rtx
4508 expand_builtin_next_arg (void)
4510 /* Checking arguments is already done in fold_builtin_next_arg
4511 that must be called before this function. */
4512 return expand_binop (ptr_mode, add_optab,
4513 crtl->args.internal_arg_pointer,
4514 crtl->args.arg_offset_rtx,
4515 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4518 /* Make it easier for the backends by protecting the valist argument
4519 from multiple evaluations. */
4521 static tree
4522 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4524 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4526 /* The current way of determining the type of valist is completely
4527 bogus. We should have the information on the va builtin instead. */
4528 if (!vatype)
4529 vatype = targetm.fn_abi_va_list (cfun->decl);
4531 if (TREE_CODE (vatype) == ARRAY_TYPE)
4533 if (TREE_SIDE_EFFECTS (valist))
4534 valist = save_expr (valist);
4536 /* For this case, the backends will be expecting a pointer to
4537 vatype, but it's possible we've actually been given an array
4538 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4539 So fix it. */
4540 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4542 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4543 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4546 else
4548 tree pt = build_pointer_type (vatype);
4550 if (! needs_lvalue)
4552 if (! TREE_SIDE_EFFECTS (valist))
4553 return valist;
4555 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4556 TREE_SIDE_EFFECTS (valist) = 1;
4559 if (TREE_SIDE_EFFECTS (valist))
4560 valist = save_expr (valist);
4561 valist = fold_build2_loc (loc, MEM_REF,
4562 vatype, valist, build_int_cst (pt, 0));
4565 return valist;
4568 /* The "standard" definition of va_list is void*. */
4570 tree
4571 std_build_builtin_va_list (void)
4573 return ptr_type_node;
4576 /* The "standard" abi va_list is va_list_type_node. */
4578 tree
4579 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4581 return va_list_type_node;
4584 /* The "standard" type of va_list is va_list_type_node. */
4586 tree
4587 std_canonical_va_list_type (tree type)
4589 tree wtype, htype;
4591 if (INDIRECT_REF_P (type))
4592 type = TREE_TYPE (type);
4593 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4594 type = TREE_TYPE (type);
4595 wtype = va_list_type_node;
4596 htype = type;
4597 /* Treat structure va_list types. */
4598 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4599 htype = TREE_TYPE (htype);
4600 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4602 /* If va_list is an array type, the argument may have decayed
4603 to a pointer type, e.g. by being passed to another function.
4604 In that case, unwrap both types so that we can compare the
4605 underlying records. */
4606 if (TREE_CODE (htype) == ARRAY_TYPE
4607 || POINTER_TYPE_P (htype))
4609 wtype = TREE_TYPE (wtype);
4610 htype = TREE_TYPE (htype);
4613 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4614 return va_list_type_node;
4616 return NULL_TREE;
4619 /* The "standard" implementation of va_start: just assign `nextarg' to
4620 the variable. */
4622 void
4623 std_expand_builtin_va_start (tree valist, rtx nextarg)
4625 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4626 convert_move (va_r, nextarg, 0);
4629 /* Expand EXP, a call to __builtin_va_start. */
4631 static rtx
4632 expand_builtin_va_start (tree exp)
4634 rtx nextarg;
4635 tree valist;
4636 location_t loc = EXPR_LOCATION (exp);
4638 if (call_expr_nargs (exp) < 2)
4640 error_at (loc, "too few arguments to function %<va_start%>");
4641 return const0_rtx;
4644 if (fold_builtin_next_arg (exp, true))
4645 return const0_rtx;
4647 nextarg = expand_builtin_next_arg ();
4648 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4650 if (targetm.expand_builtin_va_start)
4651 targetm.expand_builtin_va_start (valist, nextarg);
4652 else
4653 std_expand_builtin_va_start (valist, nextarg);
4655 return const0_rtx;
4658 /* The "standard" implementation of va_arg: read the value from the
4659 current (padded) address and increment by the (padded) size. */
4661 tree
4662 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4663 gimple_seq *post_p)
4665 tree addr, t, type_size, rounded_size, valist_tmp;
4666 unsigned HOST_WIDE_INT align, boundary;
4667 bool indirect;
4669 #ifdef ARGS_GROW_DOWNWARD
4670 /* All of the alignment and movement below is for args-grow-up machines.
4671 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4672 implement their own specialized gimplify_va_arg_expr routines. */
4673 gcc_unreachable ();
4674 #endif
4676 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4677 if (indirect)
4678 type = build_pointer_type (type);
4680 align = PARM_BOUNDARY / BITS_PER_UNIT;
4681 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4683 /* When we align parameter on stack for caller, if the parameter
4684 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4685 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4686 here with caller. */
4687 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4688 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4690 boundary /= BITS_PER_UNIT;
4692 /* Hoist the valist value into a temporary for the moment. */
4693 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4695 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4696 requires greater alignment, we must perform dynamic alignment. */
4697 if (boundary > align
4698 && !integer_zerop (TYPE_SIZE (type)))
4700 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4701 fold_build2 (POINTER_PLUS_EXPR,
4702 TREE_TYPE (valist),
4703 valist_tmp, size_int (boundary - 1)));
4704 gimplify_and_add (t, pre_p);
4706 t = fold_convert (sizetype, valist_tmp);
4707 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4708 fold_convert (TREE_TYPE (valist),
4709 fold_build2 (BIT_AND_EXPR, sizetype, t,
4710 size_int (-boundary))));
4711 gimplify_and_add (t, pre_p);
4713 else
4714 boundary = align;
4716 /* If the actual alignment is less than the alignment of the type,
4717 adjust the type accordingly so that we don't assume strict alignment
4718 when dereferencing the pointer. */
4719 boundary *= BITS_PER_UNIT;
4720 if (boundary < TYPE_ALIGN (type))
4722 type = build_variant_type_copy (type);
4723 TYPE_ALIGN (type) = boundary;
4726 /* Compute the rounded size of the type. */
4727 type_size = size_in_bytes (type);
4728 rounded_size = round_up (type_size, align);
4730 /* Reduce rounded_size so it's sharable with the postqueue. */
4731 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4733 /* Get AP. */
4734 addr = valist_tmp;
4735 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4737 /* Small args are padded downward. */
4738 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4739 rounded_size, size_int (align));
4740 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4741 size_binop (MINUS_EXPR, rounded_size, type_size));
4742 addr = fold_build2 (POINTER_PLUS_EXPR,
4743 TREE_TYPE (addr), addr, t);
4746 /* Compute new value for AP. */
4747 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4748 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4749 gimplify_and_add (t, pre_p);
4751 addr = fold_convert (build_pointer_type (type), addr);
4753 if (indirect)
4754 addr = build_va_arg_indirect_ref (addr);
4756 return build_va_arg_indirect_ref (addr);
4759 /* Build an indirect-ref expression over the given TREE, which represents a
4760 piece of a va_arg() expansion. */
4761 tree
4762 build_va_arg_indirect_ref (tree addr)
4764 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4766 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4767 mf_mark (addr);
4769 return addr;
4772 /* Return a dummy expression of type TYPE in order to keep going after an
4773 error. */
4775 static tree
4776 dummy_object (tree type)
4778 tree t = build_int_cst (build_pointer_type (type), 0);
4779 return build2 (MEM_REF, type, t, t);
4782 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4783 builtin function, but a very special sort of operator. */
4785 enum gimplify_status
4786 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4788 tree promoted_type, have_va_type;
4789 tree valist = TREE_OPERAND (*expr_p, 0);
4790 tree type = TREE_TYPE (*expr_p);
4791 tree t;
4792 location_t loc = EXPR_LOCATION (*expr_p);
4794 /* Verify that valist is of the proper type. */
4795 have_va_type = TREE_TYPE (valist);
4796 if (have_va_type == error_mark_node)
4797 return GS_ERROR;
4798 have_va_type = targetm.canonical_va_list_type (have_va_type);
4800 if (have_va_type == NULL_TREE)
4802 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4803 return GS_ERROR;
4806 /* Generate a diagnostic for requesting data of a type that cannot
4807 be passed through `...' due to type promotion at the call site. */
4808 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4809 != type)
4811 static bool gave_help;
4812 bool warned;
4814 /* Unfortunately, this is merely undefined, rather than a constraint
4815 violation, so we cannot make this an error. If this call is never
4816 executed, the program is still strictly conforming. */
4817 warned = warning_at (loc, 0,
4818 "%qT is promoted to %qT when passed through %<...%>",
4819 type, promoted_type);
4820 if (!gave_help && warned)
4822 gave_help = true;
4823 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4824 promoted_type, type);
4827 /* We can, however, treat "undefined" any way we please.
4828 Call abort to encourage the user to fix the program. */
4829 if (warned)
4830 inform (loc, "if this code is reached, the program will abort");
4831 /* Before the abort, allow the evaluation of the va_list
4832 expression to exit or longjmp. */
4833 gimplify_and_add (valist, pre_p);
4834 t = build_call_expr_loc (loc,
4835 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4836 gimplify_and_add (t, pre_p);
4838 /* This is dead code, but go ahead and finish so that the
4839 mode of the result comes out right. */
4840 *expr_p = dummy_object (type);
4841 return GS_ALL_DONE;
4843 else
4845 /* Make it easier for the backends by protecting the valist argument
4846 from multiple evaluations. */
4847 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4849 /* For this case, the backends will be expecting a pointer to
4850 TREE_TYPE (abi), but it's possible we've
4851 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4852 So fix it. */
4853 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4855 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4856 valist = fold_convert_loc (loc, p1,
4857 build_fold_addr_expr_loc (loc, valist));
4860 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4862 else
4863 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4865 if (!targetm.gimplify_va_arg_expr)
4866 /* FIXME: Once most targets are converted we should merely
4867 assert this is non-null. */
4868 return GS_ALL_DONE;
4870 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4871 return GS_OK;
4875 /* Expand EXP, a call to __builtin_va_end. */
4877 static rtx
4878 expand_builtin_va_end (tree exp)
4880 tree valist = CALL_EXPR_ARG (exp, 0);
4882 /* Evaluate for side effects, if needed. I hate macros that don't
4883 do that. */
4884 if (TREE_SIDE_EFFECTS (valist))
4885 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4887 return const0_rtx;
4890 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4891 builtin rather than just as an assignment in stdarg.h because of the
4892 nastiness of array-type va_list types. */
4894 static rtx
4895 expand_builtin_va_copy (tree exp)
4897 tree dst, src, t;
4898 location_t loc = EXPR_LOCATION (exp);
4900 dst = CALL_EXPR_ARG (exp, 0);
4901 src = CALL_EXPR_ARG (exp, 1);
4903 dst = stabilize_va_list_loc (loc, dst, 1);
4904 src = stabilize_va_list_loc (loc, src, 0);
4906 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4908 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4910 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4911 TREE_SIDE_EFFECTS (t) = 1;
4912 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4914 else
4916 rtx dstb, srcb, size;
4918 /* Evaluate to pointers. */
4919 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4920 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4921 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4922 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4924 dstb = convert_memory_address (Pmode, dstb);
4925 srcb = convert_memory_address (Pmode, srcb);
4927 /* "Dereference" to BLKmode memories. */
4928 dstb = gen_rtx_MEM (BLKmode, dstb);
4929 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4930 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4931 srcb = gen_rtx_MEM (BLKmode, srcb);
4932 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4933 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4935 /* Copy. */
4936 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4939 return const0_rtx;
4942 /* Expand a call to one of the builtin functions __builtin_frame_address or
4943 __builtin_return_address. */
4945 static rtx
4946 expand_builtin_frame_address (tree fndecl, tree exp)
4948 /* The argument must be a nonnegative integer constant.
4949 It counts the number of frames to scan up the stack.
4950 The value is the return address saved in that frame. */
4951 if (call_expr_nargs (exp) == 0)
4952 /* Warning about missing arg was already issued. */
4953 return const0_rtx;
4954 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4956 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4957 error ("invalid argument to %<__builtin_frame_address%>");
4958 else
4959 error ("invalid argument to %<__builtin_return_address%>");
4960 return const0_rtx;
4962 else
4964 rtx tem
4965 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4966 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4968 /* Some ports cannot access arbitrary stack frames. */
4969 if (tem == NULL)
4971 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4972 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4973 else
4974 warning (0, "unsupported argument to %<__builtin_return_address%>");
4975 return const0_rtx;
4978 /* For __builtin_frame_address, return what we've got. */
4979 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4980 return tem;
4982 if (!REG_P (tem)
4983 && ! CONSTANT_P (tem))
4984 tem = copy_to_mode_reg (Pmode, tem);
4985 return tem;
4989 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4990 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4991 is the same as for allocate_dynamic_stack_space. */
4993 static rtx
4994 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4996 rtx op0;
4997 rtx result;
4999 /* Emit normal call if marked not-inlineable. */
5000 if (CALL_CANNOT_INLINE_P (exp))
5001 return NULL_RTX;
5003 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5004 return NULL_RTX;
5006 /* Compute the argument. */
5007 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5009 /* Allocate the desired space. */
5010 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
5011 cannot_accumulate);
5012 result = convert_memory_address (ptr_mode, result);
5014 return result;
5017 /* Expand a call to a bswap builtin with argument ARG0. MODE
5018 is the mode to expand with. */
5020 static rtx
5021 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5023 enum machine_mode mode;
5024 tree arg;
5025 rtx op0;
5027 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5028 return NULL_RTX;
5030 arg = CALL_EXPR_ARG (exp, 0);
5031 mode = TYPE_MODE (TREE_TYPE (arg));
5032 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5034 target = expand_unop (mode, bswap_optab, op0, target, 1);
5036 gcc_assert (target);
5038 return convert_to_mode (mode, target, 0);
5041 /* Expand a call to a unary builtin in EXP.
5042 Return NULL_RTX if a normal call should be emitted rather than expanding the
5043 function in-line. If convenient, the result should be placed in TARGET.
5044 SUBTARGET may be used as the target for computing one of EXP's operands. */
5046 static rtx
5047 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5048 rtx subtarget, optab op_optab)
5050 rtx op0;
5052 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5053 return NULL_RTX;
5055 /* Compute the argument. */
5056 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5057 (subtarget
5058 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5059 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5060 VOIDmode, EXPAND_NORMAL);
5061 /* Compute op, into TARGET if possible.
5062 Set TARGET to wherever the result comes back. */
5063 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5064 op_optab, op0, target, 1);
5065 gcc_assert (target);
5067 return convert_to_mode (target_mode, target, 0);
5070 /* Expand a call to __builtin_expect. We just return our argument
5071 as the builtin_expect semantic should've been already executed by
5072 tree branch prediction pass. */
5074 static rtx
5075 expand_builtin_expect (tree exp, rtx target)
5077 tree arg;
5079 if (call_expr_nargs (exp) < 2)
5080 return const0_rtx;
5081 arg = CALL_EXPR_ARG (exp, 0);
5083 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5084 /* When guessing was done, the hints should be already stripped away. */
5085 gcc_assert (!flag_guess_branch_prob
5086 || optimize == 0 || seen_error ());
5087 return target;
5090 void
5091 expand_builtin_trap (void)
5093 #ifdef HAVE_trap
5094 if (HAVE_trap)
5095 emit_insn (gen_trap ());
5096 else
5097 #endif
5098 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5099 emit_barrier ();
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5113 /* Expand EXP, a call to fabs, fabsf or fabsl.
5114 Return NULL_RTX if a normal call should be emitted rather than expanding
5115 the function inline. If convenient, the result should be placed
5116 in TARGET. SUBTARGET may be used as the target for computing
5117 the operand. */
5119 static rtx
5120 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5122 enum machine_mode mode;
5123 tree arg;
5124 rtx op0;
5126 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5127 return NULL_RTX;
5129 arg = CALL_EXPR_ARG (exp, 0);
5130 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5131 mode = TYPE_MODE (TREE_TYPE (arg));
5132 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5133 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5136 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5137 Return NULL is a normal call should be emitted rather than expanding the
5138 function inline. If convenient, the result should be placed in TARGET.
5139 SUBTARGET may be used as the target for computing the operand. */
5141 static rtx
5142 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5144 rtx op0, op1;
5145 tree arg;
5147 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5148 return NULL_RTX;
5150 arg = CALL_EXPR_ARG (exp, 0);
5151 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5153 arg = CALL_EXPR_ARG (exp, 1);
5154 op1 = expand_normal (arg);
5156 return expand_copysign (op0, op1, target);
5159 /* Create a new constant string literal and return a char* pointer to it.
5160 The STRING_CST value is the LEN characters at STR. */
5161 tree
5162 build_string_literal (int len, const char *str)
5164 tree t, elem, index, type;
5166 t = build_string (len, str);
5167 elem = build_type_variant (char_type_node, 1, 0);
5168 index = build_index_type (size_int (len - 1));
5169 type = build_array_type (elem, index);
5170 TREE_TYPE (t) = type;
5171 TREE_CONSTANT (t) = 1;
5172 TREE_READONLY (t) = 1;
5173 TREE_STATIC (t) = 1;
5175 type = build_pointer_type (elem);
5176 t = build1 (ADDR_EXPR, type,
5177 build4 (ARRAY_REF, elem,
5178 t, integer_zero_node, NULL_TREE, NULL_TREE));
5179 return t;
5182 /* Expand a call to __builtin___clear_cache. */
5184 static rtx
5185 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5187 #ifndef HAVE_clear_cache
5188 #ifdef CLEAR_INSN_CACHE
5189 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5190 does something. Just do the default expansion to a call to
5191 __clear_cache(). */
5192 return NULL_RTX;
5193 #else
5194 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5195 does nothing. There is no need to call it. Do nothing. */
5196 return const0_rtx;
5197 #endif /* CLEAR_INSN_CACHE */
5198 #else
5199 /* We have a "clear_cache" insn, and it will handle everything. */
5200 tree begin, end;
5201 rtx begin_rtx, end_rtx;
5203 /* We must not expand to a library call. If we did, any
5204 fallback library function in libgcc that might contain a call to
5205 __builtin___clear_cache() would recurse infinitely. */
5206 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5208 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5209 return const0_rtx;
5212 if (HAVE_clear_cache)
5214 struct expand_operand ops[2];
5216 begin = CALL_EXPR_ARG (exp, 0);
5217 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5219 end = CALL_EXPR_ARG (exp, 1);
5220 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5222 create_address_operand (&ops[0], begin_rtx);
5223 create_address_operand (&ops[1], end_rtx);
5224 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
5225 return const0_rtx;
5227 return const0_rtx;
5228 #endif /* HAVE_clear_cache */
5231 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5233 static rtx
5234 round_trampoline_addr (rtx tramp)
5236 rtx temp, addend, mask;
5238 /* If we don't need too much alignment, we'll have been guaranteed
5239 proper alignment by get_trampoline_type. */
5240 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5241 return tramp;
5243 /* Round address up to desired boundary. */
5244 temp = gen_reg_rtx (Pmode);
5245 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5246 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5248 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5249 temp, 0, OPTAB_LIB_WIDEN);
5250 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5251 temp, 0, OPTAB_LIB_WIDEN);
5253 return tramp;
5256 static rtx
5257 expand_builtin_init_trampoline (tree exp)
5259 tree t_tramp, t_func, t_chain;
5260 rtx m_tramp, r_tramp, r_chain, tmp;
5262 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5263 POINTER_TYPE, VOID_TYPE))
5264 return NULL_RTX;
5266 t_tramp = CALL_EXPR_ARG (exp, 0);
5267 t_func = CALL_EXPR_ARG (exp, 1);
5268 t_chain = CALL_EXPR_ARG (exp, 2);
5270 r_tramp = expand_normal (t_tramp);
5271 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5272 MEM_NOTRAP_P (m_tramp) = 1;
5274 /* The TRAMP argument should be the address of a field within the
5275 local function's FRAME decl. Let's see if we can fill in the
5276 to fill in the MEM_ATTRs for this memory. */
5277 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5278 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5279 true, 0);
5281 tmp = round_trampoline_addr (r_tramp);
5282 if (tmp != r_tramp)
5284 m_tramp = change_address (m_tramp, BLKmode, tmp);
5285 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5286 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5289 /* The FUNC argument should be the address of the nested function.
5290 Extract the actual function decl to pass to the hook. */
5291 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5292 t_func = TREE_OPERAND (t_func, 0);
5293 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5295 r_chain = expand_normal (t_chain);
5297 /* Generate insns to initialize the trampoline. */
5298 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5300 trampolines_created = 1;
5302 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5303 "trampoline generated for nested function %qD", t_func);
5305 return const0_rtx;
5308 static rtx
5309 expand_builtin_adjust_trampoline (tree exp)
5311 rtx tramp;
5313 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5314 return NULL_RTX;
5316 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5317 tramp = round_trampoline_addr (tramp);
5318 if (targetm.calls.trampoline_adjust_address)
5319 tramp = targetm.calls.trampoline_adjust_address (tramp);
5321 return tramp;
5324 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5325 function. The function first checks whether the back end provides
5326 an insn to implement signbit for the respective mode. If not, it
5327 checks whether the floating point format of the value is such that
5328 the sign bit can be extracted. If that is not the case, the
5329 function returns NULL_RTX to indicate that a normal call should be
5330 emitted rather than expanding the function in-line. EXP is the
5331 expression that is a call to the builtin function; if convenient,
5332 the result should be placed in TARGET. */
5333 static rtx
5334 expand_builtin_signbit (tree exp, rtx target)
5336 const struct real_format *fmt;
5337 enum machine_mode fmode, imode, rmode;
5338 tree arg;
5339 int word, bitpos;
5340 enum insn_code icode;
5341 rtx temp;
5342 location_t loc = EXPR_LOCATION (exp);
5344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5345 return NULL_RTX;
5347 arg = CALL_EXPR_ARG (exp, 0);
5348 fmode = TYPE_MODE (TREE_TYPE (arg));
5349 rmode = TYPE_MODE (TREE_TYPE (exp));
5350 fmt = REAL_MODE_FORMAT (fmode);
5352 arg = builtin_save_expr (arg);
5354 /* Expand the argument yielding a RTX expression. */
5355 temp = expand_normal (arg);
5357 /* Check if the back end provides an insn that handles signbit for the
5358 argument's mode. */
5359 icode = optab_handler (signbit_optab, fmode);
5360 if (icode != CODE_FOR_nothing)
5362 rtx last = get_last_insn ();
5363 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5364 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5365 return target;
5366 delete_insns_since (last);
5369 /* For floating point formats without a sign bit, implement signbit
5370 as "ARG < 0.0". */
5371 bitpos = fmt->signbit_ro;
5372 if (bitpos < 0)
5374 /* But we can't do this if the format supports signed zero. */
5375 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5376 return NULL_RTX;
5378 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5379 build_real (TREE_TYPE (arg), dconst0));
5380 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5383 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5385 imode = int_mode_for_mode (fmode);
5386 if (imode == BLKmode)
5387 return NULL_RTX;
5388 temp = gen_lowpart (imode, temp);
5390 else
5392 imode = word_mode;
5393 /* Handle targets with different FP word orders. */
5394 if (FLOAT_WORDS_BIG_ENDIAN)
5395 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5396 else
5397 word = bitpos / BITS_PER_WORD;
5398 temp = operand_subword_force (temp, word, fmode);
5399 bitpos = bitpos % BITS_PER_WORD;
5402 /* Force the intermediate word_mode (or narrower) result into a
5403 register. This avoids attempting to create paradoxical SUBREGs
5404 of floating point modes below. */
5405 temp = force_reg (imode, temp);
5407 /* If the bitpos is within the "result mode" lowpart, the operation
5408 can be implement with a single bitwise AND. Otherwise, we need
5409 a right shift and an AND. */
5411 if (bitpos < GET_MODE_BITSIZE (rmode))
5413 double_int mask = double_int_setbit (double_int_zero, bitpos);
5415 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5416 temp = gen_lowpart (rmode, temp);
5417 temp = expand_binop (rmode, and_optab, temp,
5418 immed_double_int_const (mask, rmode),
5419 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5421 else
5423 /* Perform a logical right shift to place the signbit in the least
5424 significant bit, then truncate the result to the desired mode
5425 and mask just this bit. */
5426 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5427 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5428 temp = gen_lowpart (rmode, temp);
5429 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5430 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5433 return temp;
5436 /* Expand fork or exec calls. TARGET is the desired target of the
5437 call. EXP is the call. FN is the
5438 identificator of the actual function. IGNORE is nonzero if the
5439 value is to be ignored. */
5441 static rtx
5442 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5444 tree id, decl;
5445 tree call;
5447 /* If we are not profiling, just call the function. */
5448 if (!profile_arc_flag)
5449 return NULL_RTX;
5451 /* Otherwise call the wrapper. This should be equivalent for the rest of
5452 compiler, so the code does not diverge, and the wrapper may run the
5453 code necessary for keeping the profiling sane. */
5455 switch (DECL_FUNCTION_CODE (fn))
5457 case BUILT_IN_FORK:
5458 id = get_identifier ("__gcov_fork");
5459 break;
5461 case BUILT_IN_EXECL:
5462 id = get_identifier ("__gcov_execl");
5463 break;
5465 case BUILT_IN_EXECV:
5466 id = get_identifier ("__gcov_execv");
5467 break;
5469 case BUILT_IN_EXECLP:
5470 id = get_identifier ("__gcov_execlp");
5471 break;
5473 case BUILT_IN_EXECLE:
5474 id = get_identifier ("__gcov_execle");
5475 break;
5477 case BUILT_IN_EXECVP:
5478 id = get_identifier ("__gcov_execvp");
5479 break;
5481 case BUILT_IN_EXECVE:
5482 id = get_identifier ("__gcov_execve");
5483 break;
5485 default:
5486 gcc_unreachable ();
5489 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5490 FUNCTION_DECL, id, TREE_TYPE (fn));
5491 DECL_EXTERNAL (decl) = 1;
5492 TREE_PUBLIC (decl) = 1;
5493 DECL_ARTIFICIAL (decl) = 1;
5494 TREE_NOTHROW (decl) = 1;
5495 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5496 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5497 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5498 return expand_call (call, target, ignore);
5503 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5504 the pointer in these functions is void*, the tree optimizers may remove
5505 casts. The mode computed in expand_builtin isn't reliable either, due
5506 to __sync_bool_compare_and_swap.
5508 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5509 group of builtins. This gives us log2 of the mode size. */
5511 static inline enum machine_mode
5512 get_builtin_sync_mode (int fcode_diff)
5514 /* The size is not negotiable, so ask not to get BLKmode in return
5515 if the target indicates that a smaller size would be better. */
5516 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5519 /* Expand the memory expression LOC and return the appropriate memory operand
5520 for the builtin_sync operations. */
5522 static rtx
5523 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5525 rtx addr, mem;
5527 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5528 addr = convert_memory_address (Pmode, addr);
5530 /* Note that we explicitly do not want any alias information for this
5531 memory, so that we kill all other live memories. Otherwise we don't
5532 satisfy the full barrier semantics of the intrinsic. */
5533 mem = validize_mem (gen_rtx_MEM (mode, addr));
5535 /* The alignment needs to be at least according to that of the mode. */
5536 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5537 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5538 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5539 MEM_VOLATILE_P (mem) = 1;
5541 return mem;
5544 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5545 EXP is the CALL_EXPR. CODE is the rtx code
5546 that corresponds to the arithmetic or logical operation from the name;
5547 an exception here is that NOT actually means NAND. TARGET is an optional
5548 place for us to store the results; AFTER is true if this is the
5549 fetch_and_xxx form. IGNORE is true if we don't actually care about
5550 the result of the operation at all. */
5552 static rtx
5553 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5554 enum rtx_code code, bool after,
5555 rtx target, bool ignore)
5557 rtx val, mem;
5558 enum machine_mode old_mode;
5559 location_t loc = EXPR_LOCATION (exp);
5561 if (code == NOT && warn_sync_nand)
5563 tree fndecl = get_callee_fndecl (exp);
5564 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5566 static bool warned_f_a_n, warned_n_a_f;
5568 switch (fcode)
5570 case BUILT_IN_FETCH_AND_NAND_1:
5571 case BUILT_IN_FETCH_AND_NAND_2:
5572 case BUILT_IN_FETCH_AND_NAND_4:
5573 case BUILT_IN_FETCH_AND_NAND_8:
5574 case BUILT_IN_FETCH_AND_NAND_16:
5576 if (warned_f_a_n)
5577 break;
5579 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5580 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5581 warned_f_a_n = true;
5582 break;
5584 case BUILT_IN_NAND_AND_FETCH_1:
5585 case BUILT_IN_NAND_AND_FETCH_2:
5586 case BUILT_IN_NAND_AND_FETCH_4:
5587 case BUILT_IN_NAND_AND_FETCH_8:
5588 case BUILT_IN_NAND_AND_FETCH_16:
5590 if (warned_n_a_f)
5591 break;
5593 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5594 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5595 warned_n_a_f = true;
5596 break;
5598 default:
5599 gcc_unreachable ();
5603 /* Expand the operands. */
5604 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5606 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5607 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5608 of CONST_INTs, where we know the old_mode only from the call argument. */
5609 old_mode = GET_MODE (val);
5610 if (old_mode == VOIDmode)
5611 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5612 val = convert_modes (mode, old_mode, val, 1);
5614 if (ignore)
5615 return expand_sync_operation (mem, val, code);
5616 else
5617 return expand_sync_fetch_operation (mem, val, code, after, target);
5620 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5621 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5622 true if this is the boolean form. TARGET is a place for us to store the
5623 results; this is NOT optional if IS_BOOL is true. */
5625 static rtx
5626 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5627 bool is_bool, rtx target)
5629 rtx old_val, new_val, mem;
5630 enum machine_mode old_mode;
5632 /* Expand the operands. */
5633 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5636 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5637 mode, EXPAND_NORMAL);
5638 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5639 of CONST_INTs, where we know the old_mode only from the call argument. */
5640 old_mode = GET_MODE (old_val);
5641 if (old_mode == VOIDmode)
5642 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5643 old_val = convert_modes (mode, old_mode, old_val, 1);
5645 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5646 mode, EXPAND_NORMAL);
5647 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5648 of CONST_INTs, where we know the old_mode only from the call argument. */
5649 old_mode = GET_MODE (new_val);
5650 if (old_mode == VOIDmode)
5651 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5652 new_val = convert_modes (mode, old_mode, new_val, 1);
5654 if (is_bool)
5655 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5656 else
5657 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5660 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5661 general form is actually an atomic exchange, and some targets only
5662 support a reduced form with the second argument being a constant 1.
5663 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5664 the results. */
5666 static rtx
5667 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5668 rtx target)
5670 rtx val, mem;
5671 enum machine_mode old_mode;
5673 /* Expand the operands. */
5674 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5675 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5676 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5677 of CONST_INTs, where we know the old_mode only from the call argument. */
5678 old_mode = GET_MODE (val);
5679 if (old_mode == VOIDmode)
5680 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5681 val = convert_modes (mode, old_mode, val, 1);
5683 return expand_sync_lock_test_and_set (mem, val, target);
5686 /* Expand the __sync_synchronize intrinsic. */
5688 static void
5689 expand_builtin_synchronize (void)
5691 gimple x;
5692 VEC (tree, gc) *v_clobbers;
5694 #ifdef HAVE_memory_barrier
5695 if (HAVE_memory_barrier)
5697 emit_insn (gen_memory_barrier ());
5698 return;
5700 #endif
5702 if (synchronize_libfunc != NULL_RTX)
5704 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5705 return;
5708 /* If no explicit memory barrier instruction is available, create an
5709 empty asm stmt with a memory clobber. */
5710 v_clobbers = VEC_alloc (tree, gc, 1);
5711 VEC_quick_push (tree, v_clobbers,
5712 tree_cons (NULL, build_string (6, "memory"), NULL));
5713 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5714 gimple_asm_set_volatile (x, true);
5715 expand_asm_stmt (x);
5718 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5720 static void
5721 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5723 struct expand_operand ops[2];
5724 enum insn_code icode;
5725 rtx mem;
5727 /* Expand the operands. */
5728 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5730 /* If there is an explicit operation in the md file, use it. */
5731 icode = direct_optab_handler (sync_lock_release_optab, mode);
5732 if (icode != CODE_FOR_nothing)
5734 create_fixed_operand (&ops[0], mem);
5735 create_input_operand (&ops[1], const0_rtx, mode);
5736 if (maybe_expand_insn (icode, 2, ops))
5737 return;
5740 /* Otherwise we can implement this operation by emitting a barrier
5741 followed by a store of zero. */
5742 expand_builtin_synchronize ();
5743 emit_move_insn (mem, const0_rtx);
5746 /* Expand an expression EXP that calls a built-in function,
5747 with result going to TARGET if that's convenient
5748 (and in mode MODE if that's convenient).
5749 SUBTARGET may be used as the target for computing one of EXP's operands.
5750 IGNORE is nonzero if the value is to be ignored. */
5753 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5754 int ignore)
5756 tree fndecl = get_callee_fndecl (exp);
5757 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5758 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5759 int flags;
5761 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5762 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5764 /* When not optimizing, generate calls to library functions for a certain
5765 set of builtins. */
5766 if (!optimize
5767 && !called_as_built_in (fndecl)
5768 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5769 && fcode != BUILT_IN_ALLOCA
5770 && fcode != BUILT_IN_FREE)
5771 return expand_call (exp, target, ignore);
5773 /* The built-in function expanders test for target == const0_rtx
5774 to determine whether the function's result will be ignored. */
5775 if (ignore)
5776 target = const0_rtx;
5778 /* If the result of a pure or const built-in function is ignored, and
5779 none of its arguments are volatile, we can avoid expanding the
5780 built-in call and just evaluate the arguments for side-effects. */
5781 if (target == const0_rtx
5782 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5783 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5785 bool volatilep = false;
5786 tree arg;
5787 call_expr_arg_iterator iter;
5789 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5790 if (TREE_THIS_VOLATILE (arg))
5792 volatilep = true;
5793 break;
5796 if (! volatilep)
5798 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5799 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5800 return const0_rtx;
5804 switch (fcode)
5806 CASE_FLT_FN (BUILT_IN_FABS):
5807 target = expand_builtin_fabs (exp, target, subtarget);
5808 if (target)
5809 return target;
5810 break;
5812 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5813 target = expand_builtin_copysign (exp, target, subtarget);
5814 if (target)
5815 return target;
5816 break;
5818 /* Just do a normal library call if we were unable to fold
5819 the values. */
5820 CASE_FLT_FN (BUILT_IN_CABS):
5821 break;
5823 CASE_FLT_FN (BUILT_IN_EXP):
5824 CASE_FLT_FN (BUILT_IN_EXP10):
5825 CASE_FLT_FN (BUILT_IN_POW10):
5826 CASE_FLT_FN (BUILT_IN_EXP2):
5827 CASE_FLT_FN (BUILT_IN_EXPM1):
5828 CASE_FLT_FN (BUILT_IN_LOGB):
5829 CASE_FLT_FN (BUILT_IN_LOG):
5830 CASE_FLT_FN (BUILT_IN_LOG10):
5831 CASE_FLT_FN (BUILT_IN_LOG2):
5832 CASE_FLT_FN (BUILT_IN_LOG1P):
5833 CASE_FLT_FN (BUILT_IN_TAN):
5834 CASE_FLT_FN (BUILT_IN_ASIN):
5835 CASE_FLT_FN (BUILT_IN_ACOS):
5836 CASE_FLT_FN (BUILT_IN_ATAN):
5837 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5838 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5839 because of possible accuracy problems. */
5840 if (! flag_unsafe_math_optimizations)
5841 break;
5842 CASE_FLT_FN (BUILT_IN_SQRT):
5843 CASE_FLT_FN (BUILT_IN_FLOOR):
5844 CASE_FLT_FN (BUILT_IN_CEIL):
5845 CASE_FLT_FN (BUILT_IN_TRUNC):
5846 CASE_FLT_FN (BUILT_IN_ROUND):
5847 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5848 CASE_FLT_FN (BUILT_IN_RINT):
5849 target = expand_builtin_mathfn (exp, target, subtarget);
5850 if (target)
5851 return target;
5852 break;
5854 CASE_FLT_FN (BUILT_IN_FMA):
5855 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5856 if (target)
5857 return target;
5858 break;
5860 CASE_FLT_FN (BUILT_IN_ILOGB):
5861 if (! flag_unsafe_math_optimizations)
5862 break;
5863 CASE_FLT_FN (BUILT_IN_ISINF):
5864 CASE_FLT_FN (BUILT_IN_FINITE):
5865 case BUILT_IN_ISFINITE:
5866 case BUILT_IN_ISNORMAL:
5867 target = expand_builtin_interclass_mathfn (exp, target);
5868 if (target)
5869 return target;
5870 break;
5872 CASE_FLT_FN (BUILT_IN_LCEIL):
5873 CASE_FLT_FN (BUILT_IN_LLCEIL):
5874 CASE_FLT_FN (BUILT_IN_LFLOOR):
5875 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5876 target = expand_builtin_int_roundingfn (exp, target);
5877 if (target)
5878 return target;
5879 break;
5881 CASE_FLT_FN (BUILT_IN_LRINT):
5882 CASE_FLT_FN (BUILT_IN_LLRINT):
5883 CASE_FLT_FN (BUILT_IN_LROUND):
5884 CASE_FLT_FN (BUILT_IN_LLROUND):
5885 target = expand_builtin_int_roundingfn_2 (exp, target);
5886 if (target)
5887 return target;
5888 break;
5890 CASE_FLT_FN (BUILT_IN_POW):
5891 target = expand_builtin_pow (exp, target, subtarget);
5892 if (target)
5893 return target;
5894 break;
5896 CASE_FLT_FN (BUILT_IN_POWI):
5897 target = expand_builtin_powi (exp, target);
5898 if (target)
5899 return target;
5900 break;
5902 CASE_FLT_FN (BUILT_IN_ATAN2):
5903 CASE_FLT_FN (BUILT_IN_LDEXP):
5904 CASE_FLT_FN (BUILT_IN_SCALB):
5905 CASE_FLT_FN (BUILT_IN_SCALBN):
5906 CASE_FLT_FN (BUILT_IN_SCALBLN):
5907 if (! flag_unsafe_math_optimizations)
5908 break;
5910 CASE_FLT_FN (BUILT_IN_FMOD):
5911 CASE_FLT_FN (BUILT_IN_REMAINDER):
5912 CASE_FLT_FN (BUILT_IN_DREM):
5913 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5914 if (target)
5915 return target;
5916 break;
5918 CASE_FLT_FN (BUILT_IN_CEXPI):
5919 target = expand_builtin_cexpi (exp, target);
5920 gcc_assert (target);
5921 return target;
5923 CASE_FLT_FN (BUILT_IN_SIN):
5924 CASE_FLT_FN (BUILT_IN_COS):
5925 if (! flag_unsafe_math_optimizations)
5926 break;
5927 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5928 if (target)
5929 return target;
5930 break;
5932 CASE_FLT_FN (BUILT_IN_SINCOS):
5933 if (! flag_unsafe_math_optimizations)
5934 break;
5935 target = expand_builtin_sincos (exp);
5936 if (target)
5937 return target;
5938 break;
5940 case BUILT_IN_APPLY_ARGS:
5941 return expand_builtin_apply_args ();
5943 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5944 FUNCTION with a copy of the parameters described by
5945 ARGUMENTS, and ARGSIZE. It returns a block of memory
5946 allocated on the stack into which is stored all the registers
5947 that might possibly be used for returning the result of a
5948 function. ARGUMENTS is the value returned by
5949 __builtin_apply_args. ARGSIZE is the number of bytes of
5950 arguments that must be copied. ??? How should this value be
5951 computed? We'll also need a safe worst case value for varargs
5952 functions. */
5953 case BUILT_IN_APPLY:
5954 if (!validate_arglist (exp, POINTER_TYPE,
5955 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5956 && !validate_arglist (exp, REFERENCE_TYPE,
5957 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5958 return const0_rtx;
5959 else
5961 rtx ops[3];
5963 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5964 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5965 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5967 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5970 /* __builtin_return (RESULT) causes the function to return the
5971 value described by RESULT. RESULT is address of the block of
5972 memory returned by __builtin_apply. */
5973 case BUILT_IN_RETURN:
5974 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5975 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5976 return const0_rtx;
5978 case BUILT_IN_SAVEREGS:
5979 return expand_builtin_saveregs ();
5981 case BUILT_IN_VA_ARG_PACK:
5982 /* All valid uses of __builtin_va_arg_pack () are removed during
5983 inlining. */
5984 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5985 return const0_rtx;
5987 case BUILT_IN_VA_ARG_PACK_LEN:
5988 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5989 inlining. */
5990 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5991 return const0_rtx;
5993 /* Return the address of the first anonymous stack arg. */
5994 case BUILT_IN_NEXT_ARG:
5995 if (fold_builtin_next_arg (exp, false))
5996 return const0_rtx;
5997 return expand_builtin_next_arg ();
5999 case BUILT_IN_CLEAR_CACHE:
6000 target = expand_builtin___clear_cache (exp);
6001 if (target)
6002 return target;
6003 break;
6005 case BUILT_IN_CLASSIFY_TYPE:
6006 return expand_builtin_classify_type (exp);
6008 case BUILT_IN_CONSTANT_P:
6009 return const0_rtx;
6011 case BUILT_IN_FRAME_ADDRESS:
6012 case BUILT_IN_RETURN_ADDRESS:
6013 return expand_builtin_frame_address (fndecl, exp);
6015 /* Returns the address of the area where the structure is returned.
6016 0 otherwise. */
6017 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6018 if (call_expr_nargs (exp) != 0
6019 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6020 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6021 return const0_rtx;
6022 else
6023 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6025 case BUILT_IN_ALLOCA:
6026 /* If the allocation stems from the declaration of a variable-sized
6027 object, it cannot accumulate. */
6028 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6029 if (target)
6030 return target;
6031 break;
6033 case BUILT_IN_STACK_SAVE:
6034 return expand_stack_save ();
6036 case BUILT_IN_STACK_RESTORE:
6037 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6038 return const0_rtx;
6040 case BUILT_IN_BSWAP32:
6041 case BUILT_IN_BSWAP64:
6042 target = expand_builtin_bswap (exp, target, subtarget);
6044 if (target)
6045 return target;
6046 break;
6048 CASE_INT_FN (BUILT_IN_FFS):
6049 case BUILT_IN_FFSIMAX:
6050 target = expand_builtin_unop (target_mode, exp, target,
6051 subtarget, ffs_optab);
6052 if (target)
6053 return target;
6054 break;
6056 CASE_INT_FN (BUILT_IN_CLZ):
6057 case BUILT_IN_CLZIMAX:
6058 target = expand_builtin_unop (target_mode, exp, target,
6059 subtarget, clz_optab);
6060 if (target)
6061 return target;
6062 break;
6064 CASE_INT_FN (BUILT_IN_CTZ):
6065 case BUILT_IN_CTZIMAX:
6066 target = expand_builtin_unop (target_mode, exp, target,
6067 subtarget, ctz_optab);
6068 if (target)
6069 return target;
6070 break;
6072 CASE_INT_FN (BUILT_IN_POPCOUNT):
6073 case BUILT_IN_POPCOUNTIMAX:
6074 target = expand_builtin_unop (target_mode, exp, target,
6075 subtarget, popcount_optab);
6076 if (target)
6077 return target;
6078 break;
6080 CASE_INT_FN (BUILT_IN_PARITY):
6081 case BUILT_IN_PARITYIMAX:
6082 target = expand_builtin_unop (target_mode, exp, target,
6083 subtarget, parity_optab);
6084 if (target)
6085 return target;
6086 break;
6088 case BUILT_IN_STRLEN:
6089 target = expand_builtin_strlen (exp, target, target_mode);
6090 if (target)
6091 return target;
6092 break;
6094 case BUILT_IN_STRCPY:
6095 target = expand_builtin_strcpy (exp, target);
6096 if (target)
6097 return target;
6098 break;
6100 case BUILT_IN_STRNCPY:
6101 target = expand_builtin_strncpy (exp, target);
6102 if (target)
6103 return target;
6104 break;
6106 case BUILT_IN_STPCPY:
6107 target = expand_builtin_stpcpy (exp, target, mode);
6108 if (target)
6109 return target;
6110 break;
6112 case BUILT_IN_MEMCPY:
6113 target = expand_builtin_memcpy (exp, target);
6114 if (target)
6115 return target;
6116 break;
6118 case BUILT_IN_MEMPCPY:
6119 target = expand_builtin_mempcpy (exp, target, mode);
6120 if (target)
6121 return target;
6122 break;
6124 case BUILT_IN_MEMSET:
6125 target = expand_builtin_memset (exp, target, mode);
6126 if (target)
6127 return target;
6128 break;
6130 case BUILT_IN_BZERO:
6131 target = expand_builtin_bzero (exp);
6132 if (target)
6133 return target;
6134 break;
6136 case BUILT_IN_STRCMP:
6137 target = expand_builtin_strcmp (exp, target);
6138 if (target)
6139 return target;
6140 break;
6142 case BUILT_IN_STRNCMP:
6143 target = expand_builtin_strncmp (exp, target, mode);
6144 if (target)
6145 return target;
6146 break;
6148 case BUILT_IN_BCMP:
6149 case BUILT_IN_MEMCMP:
6150 target = expand_builtin_memcmp (exp, target, mode);
6151 if (target)
6152 return target;
6153 break;
6155 case BUILT_IN_SETJMP:
6156 /* This should have been lowered to the builtins below. */
6157 gcc_unreachable ();
6159 case BUILT_IN_SETJMP_SETUP:
6160 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6161 and the receiver label. */
6162 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6164 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6165 VOIDmode, EXPAND_NORMAL);
6166 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6167 rtx label_r = label_rtx (label);
6169 /* This is copied from the handling of non-local gotos. */
6170 expand_builtin_setjmp_setup (buf_addr, label_r);
6171 nonlocal_goto_handler_labels
6172 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6173 nonlocal_goto_handler_labels);
6174 /* ??? Do not let expand_label treat us as such since we would
6175 not want to be both on the list of non-local labels and on
6176 the list of forced labels. */
6177 FORCED_LABEL (label) = 0;
6178 return const0_rtx;
6180 break;
6182 case BUILT_IN_SETJMP_DISPATCHER:
6183 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6184 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6186 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6187 rtx label_r = label_rtx (label);
6189 /* Remove the dispatcher label from the list of non-local labels
6190 since the receiver labels have been added to it above. */
6191 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6192 return const0_rtx;
6194 break;
6196 case BUILT_IN_SETJMP_RECEIVER:
6197 /* __builtin_setjmp_receiver is passed the receiver label. */
6198 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6200 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6201 rtx label_r = label_rtx (label);
6203 expand_builtin_setjmp_receiver (label_r);
6204 return const0_rtx;
6206 break;
6208 /* __builtin_longjmp is passed a pointer to an array of five words.
6209 It's similar to the C library longjmp function but works with
6210 __builtin_setjmp above. */
6211 case BUILT_IN_LONGJMP:
6212 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6214 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6215 VOIDmode, EXPAND_NORMAL);
6216 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6218 if (value != const1_rtx)
6220 error ("%<__builtin_longjmp%> second argument must be 1");
6221 return const0_rtx;
6224 expand_builtin_longjmp (buf_addr, value);
6225 return const0_rtx;
6227 break;
6229 case BUILT_IN_NONLOCAL_GOTO:
6230 target = expand_builtin_nonlocal_goto (exp);
6231 if (target)
6232 return target;
6233 break;
6235 /* This updates the setjmp buffer that is its argument with the value
6236 of the current stack pointer. */
6237 case BUILT_IN_UPDATE_SETJMP_BUF:
6238 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6240 rtx buf_addr
6241 = expand_normal (CALL_EXPR_ARG (exp, 0));
6243 expand_builtin_update_setjmp_buf (buf_addr);
6244 return const0_rtx;
6246 break;
6248 case BUILT_IN_TRAP:
6249 expand_builtin_trap ();
6250 return const0_rtx;
6252 case BUILT_IN_UNREACHABLE:
6253 expand_builtin_unreachable ();
6254 return const0_rtx;
6256 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6257 case BUILT_IN_SIGNBITD32:
6258 case BUILT_IN_SIGNBITD64:
6259 case BUILT_IN_SIGNBITD128:
6260 target = expand_builtin_signbit (exp, target);
6261 if (target)
6262 return target;
6263 break;
6265 /* Various hooks for the DWARF 2 __throw routine. */
6266 case BUILT_IN_UNWIND_INIT:
6267 expand_builtin_unwind_init ();
6268 return const0_rtx;
6269 case BUILT_IN_DWARF_CFA:
6270 return virtual_cfa_rtx;
6271 #ifdef DWARF2_UNWIND_INFO
6272 case BUILT_IN_DWARF_SP_COLUMN:
6273 return expand_builtin_dwarf_sp_column ();
6274 case BUILT_IN_INIT_DWARF_REG_SIZES:
6275 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6276 return const0_rtx;
6277 #endif
6278 case BUILT_IN_FROB_RETURN_ADDR:
6279 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6280 case BUILT_IN_EXTRACT_RETURN_ADDR:
6281 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6282 case BUILT_IN_EH_RETURN:
6283 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6284 CALL_EXPR_ARG (exp, 1));
6285 return const0_rtx;
6286 #ifdef EH_RETURN_DATA_REGNO
6287 case BUILT_IN_EH_RETURN_DATA_REGNO:
6288 return expand_builtin_eh_return_data_regno (exp);
6289 #endif
6290 case BUILT_IN_EXTEND_POINTER:
6291 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6292 case BUILT_IN_EH_POINTER:
6293 return expand_builtin_eh_pointer (exp);
6294 case BUILT_IN_EH_FILTER:
6295 return expand_builtin_eh_filter (exp);
6296 case BUILT_IN_EH_COPY_VALUES:
6297 return expand_builtin_eh_copy_values (exp);
6299 case BUILT_IN_VA_START:
6300 return expand_builtin_va_start (exp);
6301 case BUILT_IN_VA_END:
6302 return expand_builtin_va_end (exp);
6303 case BUILT_IN_VA_COPY:
6304 return expand_builtin_va_copy (exp);
6305 case BUILT_IN_EXPECT:
6306 return expand_builtin_expect (exp, target);
6307 case BUILT_IN_PREFETCH:
6308 expand_builtin_prefetch (exp);
6309 return const0_rtx;
6311 case BUILT_IN_INIT_TRAMPOLINE:
6312 return expand_builtin_init_trampoline (exp);
6313 case BUILT_IN_ADJUST_TRAMPOLINE:
6314 return expand_builtin_adjust_trampoline (exp);
6316 case BUILT_IN_FORK:
6317 case BUILT_IN_EXECL:
6318 case BUILT_IN_EXECV:
6319 case BUILT_IN_EXECLP:
6320 case BUILT_IN_EXECLE:
6321 case BUILT_IN_EXECVP:
6322 case BUILT_IN_EXECVE:
6323 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6324 if (target)
6325 return target;
6326 break;
6328 case BUILT_IN_FETCH_AND_ADD_1:
6329 case BUILT_IN_FETCH_AND_ADD_2:
6330 case BUILT_IN_FETCH_AND_ADD_4:
6331 case BUILT_IN_FETCH_AND_ADD_8:
6332 case BUILT_IN_FETCH_AND_ADD_16:
6333 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6334 target = expand_builtin_sync_operation (mode, exp, PLUS,
6335 false, target, ignore);
6336 if (target)
6337 return target;
6338 break;
6340 case BUILT_IN_FETCH_AND_SUB_1:
6341 case BUILT_IN_FETCH_AND_SUB_2:
6342 case BUILT_IN_FETCH_AND_SUB_4:
6343 case BUILT_IN_FETCH_AND_SUB_8:
6344 case BUILT_IN_FETCH_AND_SUB_16:
6345 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6346 target = expand_builtin_sync_operation (mode, exp, MINUS,
6347 false, target, ignore);
6348 if (target)
6349 return target;
6350 break;
6352 case BUILT_IN_FETCH_AND_OR_1:
6353 case BUILT_IN_FETCH_AND_OR_2:
6354 case BUILT_IN_FETCH_AND_OR_4:
6355 case BUILT_IN_FETCH_AND_OR_8:
6356 case BUILT_IN_FETCH_AND_OR_16:
6357 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6358 target = expand_builtin_sync_operation (mode, exp, IOR,
6359 false, target, ignore);
6360 if (target)
6361 return target;
6362 break;
6364 case BUILT_IN_FETCH_AND_AND_1:
6365 case BUILT_IN_FETCH_AND_AND_2:
6366 case BUILT_IN_FETCH_AND_AND_4:
6367 case BUILT_IN_FETCH_AND_AND_8:
6368 case BUILT_IN_FETCH_AND_AND_16:
6369 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6370 target = expand_builtin_sync_operation (mode, exp, AND,
6371 false, target, ignore);
6372 if (target)
6373 return target;
6374 break;
6376 case BUILT_IN_FETCH_AND_XOR_1:
6377 case BUILT_IN_FETCH_AND_XOR_2:
6378 case BUILT_IN_FETCH_AND_XOR_4:
6379 case BUILT_IN_FETCH_AND_XOR_8:
6380 case BUILT_IN_FETCH_AND_XOR_16:
6381 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6382 target = expand_builtin_sync_operation (mode, exp, XOR,
6383 false, target, ignore);
6384 if (target)
6385 return target;
6386 break;
6388 case BUILT_IN_FETCH_AND_NAND_1:
6389 case BUILT_IN_FETCH_AND_NAND_2:
6390 case BUILT_IN_FETCH_AND_NAND_4:
6391 case BUILT_IN_FETCH_AND_NAND_8:
6392 case BUILT_IN_FETCH_AND_NAND_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6394 target = expand_builtin_sync_operation (mode, exp, NOT,
6395 false, target, ignore);
6396 if (target)
6397 return target;
6398 break;
6400 case BUILT_IN_ADD_AND_FETCH_1:
6401 case BUILT_IN_ADD_AND_FETCH_2:
6402 case BUILT_IN_ADD_AND_FETCH_4:
6403 case BUILT_IN_ADD_AND_FETCH_8:
6404 case BUILT_IN_ADD_AND_FETCH_16:
6405 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6406 target = expand_builtin_sync_operation (mode, exp, PLUS,
6407 true, target, ignore);
6408 if (target)
6409 return target;
6410 break;
6412 case BUILT_IN_SUB_AND_FETCH_1:
6413 case BUILT_IN_SUB_AND_FETCH_2:
6414 case BUILT_IN_SUB_AND_FETCH_4:
6415 case BUILT_IN_SUB_AND_FETCH_8:
6416 case BUILT_IN_SUB_AND_FETCH_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6418 target = expand_builtin_sync_operation (mode, exp, MINUS,
6419 true, target, ignore);
6420 if (target)
6421 return target;
6422 break;
6424 case BUILT_IN_OR_AND_FETCH_1:
6425 case BUILT_IN_OR_AND_FETCH_2:
6426 case BUILT_IN_OR_AND_FETCH_4:
6427 case BUILT_IN_OR_AND_FETCH_8:
6428 case BUILT_IN_OR_AND_FETCH_16:
6429 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6430 target = expand_builtin_sync_operation (mode, exp, IOR,
6431 true, target, ignore);
6432 if (target)
6433 return target;
6434 break;
6436 case BUILT_IN_AND_AND_FETCH_1:
6437 case BUILT_IN_AND_AND_FETCH_2:
6438 case BUILT_IN_AND_AND_FETCH_4:
6439 case BUILT_IN_AND_AND_FETCH_8:
6440 case BUILT_IN_AND_AND_FETCH_16:
6441 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6442 target = expand_builtin_sync_operation (mode, exp, AND,
6443 true, target, ignore);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_XOR_AND_FETCH_1:
6449 case BUILT_IN_XOR_AND_FETCH_2:
6450 case BUILT_IN_XOR_AND_FETCH_4:
6451 case BUILT_IN_XOR_AND_FETCH_8:
6452 case BUILT_IN_XOR_AND_FETCH_16:
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6454 target = expand_builtin_sync_operation (mode, exp, XOR,
6455 true, target, ignore);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_NAND_AND_FETCH_1:
6461 case BUILT_IN_NAND_AND_FETCH_2:
6462 case BUILT_IN_NAND_AND_FETCH_4:
6463 case BUILT_IN_NAND_AND_FETCH_8:
6464 case BUILT_IN_NAND_AND_FETCH_16:
6465 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6466 target = expand_builtin_sync_operation (mode, exp, NOT,
6467 true, target, ignore);
6468 if (target)
6469 return target;
6470 break;
6472 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6473 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6474 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6475 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6476 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6477 if (mode == VOIDmode)
6478 mode = TYPE_MODE (boolean_type_node);
6479 if (!target || !register_operand (target, mode))
6480 target = gen_reg_rtx (mode);
6482 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6483 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6484 if (target)
6485 return target;
6486 break;
6488 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6489 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6490 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6491 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6492 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6493 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6494 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_LOCK_TEST_AND_SET_1:
6500 case BUILT_IN_LOCK_TEST_AND_SET_2:
6501 case BUILT_IN_LOCK_TEST_AND_SET_4:
6502 case BUILT_IN_LOCK_TEST_AND_SET_8:
6503 case BUILT_IN_LOCK_TEST_AND_SET_16:
6504 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6505 target = expand_builtin_lock_test_and_set (mode, exp, target);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_LOCK_RELEASE_1:
6511 case BUILT_IN_LOCK_RELEASE_2:
6512 case BUILT_IN_LOCK_RELEASE_4:
6513 case BUILT_IN_LOCK_RELEASE_8:
6514 case BUILT_IN_LOCK_RELEASE_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6516 expand_builtin_lock_release (mode, exp);
6517 return const0_rtx;
6519 case BUILT_IN_SYNCHRONIZE:
6520 expand_builtin_synchronize ();
6521 return const0_rtx;
6523 case BUILT_IN_OBJECT_SIZE:
6524 return expand_builtin_object_size (exp);
6526 case BUILT_IN_MEMCPY_CHK:
6527 case BUILT_IN_MEMPCPY_CHK:
6528 case BUILT_IN_MEMMOVE_CHK:
6529 case BUILT_IN_MEMSET_CHK:
6530 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6531 if (target)
6532 return target;
6533 break;
6535 case BUILT_IN_STRCPY_CHK:
6536 case BUILT_IN_STPCPY_CHK:
6537 case BUILT_IN_STRNCPY_CHK:
6538 case BUILT_IN_STRCAT_CHK:
6539 case BUILT_IN_STRNCAT_CHK:
6540 case BUILT_IN_SNPRINTF_CHK:
6541 case BUILT_IN_VSNPRINTF_CHK:
6542 maybe_emit_chk_warning (exp, fcode);
6543 break;
6545 case BUILT_IN_SPRINTF_CHK:
6546 case BUILT_IN_VSPRINTF_CHK:
6547 maybe_emit_sprintf_chk_warning (exp, fcode);
6548 break;
6550 case BUILT_IN_FREE:
6551 maybe_emit_free_warning (exp);
6552 break;
6554 default: /* just do library call, if unknown builtin */
6555 break;
6558 /* The switch statement above can drop through to cause the function
6559 to be called normally. */
6560 return expand_call (exp, target, ignore);
6563 /* Determine whether a tree node represents a call to a built-in
6564 function. If the tree T is a call to a built-in function with
6565 the right number of arguments of the appropriate types, return
6566 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6567 Otherwise the return value is END_BUILTINS. */
6569 enum built_in_function
6570 builtin_mathfn_code (const_tree t)
6572 const_tree fndecl, arg, parmlist;
6573 const_tree argtype, parmtype;
6574 const_call_expr_arg_iterator iter;
6576 if (TREE_CODE (t) != CALL_EXPR
6577 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6578 return END_BUILTINS;
6580 fndecl = get_callee_fndecl (t);
6581 if (fndecl == NULL_TREE
6582 || TREE_CODE (fndecl) != FUNCTION_DECL
6583 || ! DECL_BUILT_IN (fndecl)
6584 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6585 return END_BUILTINS;
6587 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6588 init_const_call_expr_arg_iterator (t, &iter);
6589 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6591 /* If a function doesn't take a variable number of arguments,
6592 the last element in the list will have type `void'. */
6593 parmtype = TREE_VALUE (parmlist);
6594 if (VOID_TYPE_P (parmtype))
6596 if (more_const_call_expr_args_p (&iter))
6597 return END_BUILTINS;
6598 return DECL_FUNCTION_CODE (fndecl);
6601 if (! more_const_call_expr_args_p (&iter))
6602 return END_BUILTINS;
6604 arg = next_const_call_expr_arg (&iter);
6605 argtype = TREE_TYPE (arg);
6607 if (SCALAR_FLOAT_TYPE_P (parmtype))
6609 if (! SCALAR_FLOAT_TYPE_P (argtype))
6610 return END_BUILTINS;
6612 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6614 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6615 return END_BUILTINS;
6617 else if (POINTER_TYPE_P (parmtype))
6619 if (! POINTER_TYPE_P (argtype))
6620 return END_BUILTINS;
6622 else if (INTEGRAL_TYPE_P (parmtype))
6624 if (! INTEGRAL_TYPE_P (argtype))
6625 return END_BUILTINS;
6627 else
6628 return END_BUILTINS;
6631 /* Variable-length argument list. */
6632 return DECL_FUNCTION_CODE (fndecl);
6635 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6636 evaluate to a constant. */
6638 static tree
6639 fold_builtin_constant_p (tree arg)
6641 /* We return 1 for a numeric type that's known to be a constant
6642 value at compile-time or for an aggregate type that's a
6643 literal constant. */
6644 STRIP_NOPS (arg);
6646 /* If we know this is a constant, emit the constant of one. */
6647 if (CONSTANT_CLASS_P (arg)
6648 || (TREE_CODE (arg) == CONSTRUCTOR
6649 && TREE_CONSTANT (arg)))
6650 return integer_one_node;
6651 if (TREE_CODE (arg) == ADDR_EXPR)
6653 tree op = TREE_OPERAND (arg, 0);
6654 if (TREE_CODE (op) == STRING_CST
6655 || (TREE_CODE (op) == ARRAY_REF
6656 && integer_zerop (TREE_OPERAND (op, 1))
6657 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6658 return integer_one_node;
6661 /* If this expression has side effects, show we don't know it to be a
6662 constant. Likewise if it's a pointer or aggregate type since in
6663 those case we only want literals, since those are only optimized
6664 when generating RTL, not later.
6665 And finally, if we are compiling an initializer, not code, we
6666 need to return a definite result now; there's not going to be any
6667 more optimization done. */
6668 if (TREE_SIDE_EFFECTS (arg)
6669 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6670 || POINTER_TYPE_P (TREE_TYPE (arg))
6671 || cfun == 0
6672 || folding_initializer)
6673 return integer_zero_node;
6675 return NULL_TREE;
6678 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6679 return it as a truthvalue. */
6681 static tree
6682 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6684 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6686 fn = built_in_decls[BUILT_IN_EXPECT];
6687 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6688 ret_type = TREE_TYPE (TREE_TYPE (fn));
6689 pred_type = TREE_VALUE (arg_types);
6690 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6692 pred = fold_convert_loc (loc, pred_type, pred);
6693 expected = fold_convert_loc (loc, expected_type, expected);
6694 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6696 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6697 build_int_cst (ret_type, 0));
6700 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6701 NULL_TREE if no simplification is possible. */
6703 static tree
6704 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6706 tree inner, fndecl;
6707 enum tree_code code;
6709 /* If this is a builtin_expect within a builtin_expect keep the
6710 inner one. See through a comparison against a constant. It
6711 might have been added to create a truthvalue. */
6712 inner = arg0;
6713 if (COMPARISON_CLASS_P (inner)
6714 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6715 inner = TREE_OPERAND (inner, 0);
/* A nested __builtin_expect call: the outer one adds nothing, so
   return ARG0 (which already carries the inner expectation).  */
6717 if (TREE_CODE (inner) == CALL_EXPR
6718 && (fndecl = get_callee_fndecl (inner))
6719 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6720 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6721 return arg0;
6723 /* Distribute the expected value over short-circuiting operators.
6724 See through the cast from truthvalue_type_node to long. */
6725 inner = arg0;
6726 while (TREE_CODE (inner) == NOP_EXPR
6727 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6728 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6729 inner = TREE_OPERAND (inner, 0);
6731 code = TREE_CODE (inner);
6732 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6734 tree op0 = TREE_OPERAND (inner, 0);
6735 tree op1 = TREE_OPERAND (inner, 1);
/* Wrap each operand of the && / || in its own __builtin_expect,
   then rebuild the short-circuit expression from the wrapped
   operands.  */
6737 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6738 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6739 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6741 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6744 /* If the argument isn't invariant then there's nothing else we can do. */
6745 if (!TREE_CONSTANT (arg0))
6746 return NULL_TREE;
6748 /* If we expect that a comparison against the argument will fold to
6749 a constant return the constant. In practice, this means a true
6750 constant or the address of a non-weak symbol. */
6751 inner = arg0;
6752 STRIP_NOPS (inner);
/* Strip the ADDR_EXPR down to the underlying declaration, looking
   through COMPONENT_REFs and ARRAY_REFs.  The address of a weak
   symbol is not known at compile time, so it cannot be treated as
   a constant here.  */
6753 if (TREE_CODE (inner) == ADDR_EXPR)
6757 inner = TREE_OPERAND (inner, 0);
6759 while (TREE_CODE (inner) == COMPONENT_REF
6760 || TREE_CODE (inner) == ARRAY_REF);
6761 if ((TREE_CODE (inner) == VAR_DECL
6762 || TREE_CODE (inner) == FUNCTION_DECL)
6763 && DECL_WEAK (inner))
6764 return NULL_TREE;
6767 /* Otherwise, ARG0 already has the proper type for the return value. */
6768 return arg0;
6771 /* Fold a call to __builtin_classify_type with argument ARG. */
6773 static tree
6774 fold_builtin_classify_type (tree arg)
6776 if (arg == 0)
6777 return build_int_cst (NULL_TREE, no_type_class);
6779 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6782 /* Fold a call to __builtin_strlen with argument ARG. */
6784 static tree
6785 fold_builtin_strlen (location_t loc, tree type, tree arg)
6787 if (!validate_arg (arg, POINTER_TYPE))
6788 return NULL_TREE;
6789 else
6791 tree len = c_strlen (arg, 0);
6793 if (len)
6794 return fold_convert_loc (loc, type, len);
6796 return NULL_TREE;
6800 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6802 static tree
6803 fold_builtin_inf (location_t loc, tree type, int warn)
6805 REAL_VALUE_TYPE real;
6807 /* __builtin_inff is intended to be usable to define INFINITY on all
6808 targets. If an infinity is not available, INFINITY expands "to a
6809 positive constant of type float that overflows at translation
6810 time", footnote "In this case, using INFINITY will violate the
6811 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6812 Thus we pedwarn to ensure this constraint violation is
6813 diagnosed. */
6814 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6815 pedwarn (loc, 0, "target format does not support infinity");
6817 real_inf (&real);
6818 return build_real (type, real);
6821 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6823 static tree
6824 fold_builtin_nan (tree arg, tree type, int quiet)
6826 REAL_VALUE_TYPE real;
6827 const char *str;
6829 if (!validate_arg (arg, POINTER_TYPE))
6830 return NULL_TREE;
6831 str = c_getstr (arg);
6832 if (!str)
6833 return NULL_TREE;
6835 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6836 return NULL_TREE;
6838 return build_real (type, real);
6841 /* Return true if the floating point expression T has an integer value.
6842 We also allow +Inf, -Inf and NaN to be considered integer values. */
6844 static bool
6845 integer_valued_real_p (tree t)
6847 switch (TREE_CODE (t))
/* A conversion from an integer type is integral by construction.  */
6849 case FLOAT_EXPR:
6850 return true;
/* These have the same integrality as their (only relevant)
   operand.  */
6852 case ABS_EXPR:
6853 case SAVE_EXPR:
6854 return integer_valued_real_p (TREE_OPERAND (t, 0));
/* The value of these expressions is their second operand.  */
6856 case COMPOUND_EXPR:
6857 case MODIFY_EXPR:
6858 case BIND_EXPR:
6859 return integer_valued_real_p (TREE_OPERAND (t, 1));
/* These operations map integer operands to integer results.  */
6861 case PLUS_EXPR:
6862 case MINUS_EXPR:
6863 case MULT_EXPR:
6864 case MIN_EXPR:
6865 case MAX_EXPR:
6866 return integer_valued_real_p (TREE_OPERAND (t, 0))
6867 && integer_valued_real_p (TREE_OPERAND (t, 1));
/* A conditional is integral when both of its arms are.  */
6869 case COND_EXPR:
6870 return integer_valued_real_p (TREE_OPERAND (t, 1))
6871 && integer_valued_real_p (TREE_OPERAND (t, 2));
6873 case REAL_CST:
6874 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
/* A widening from an integer type, or from a real that is itself
   integer valued, preserves integrality.  */
6876 case NOP_EXPR:
6878 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6879 if (TREE_CODE (type) == INTEGER_TYPE)
6880 return true;
6881 if (TREE_CODE (type) == REAL_TYPE)
6882 return integer_valued_real_p (TREE_OPERAND (t, 0));
6883 break;
/* Rounding built-ins always produce integral results; fmin/fmax of
   integral arguments is integral.  */
6886 case CALL_EXPR:
6887 switch (builtin_mathfn_code (t))
6889 CASE_FLT_FN (BUILT_IN_CEIL):
6890 CASE_FLT_FN (BUILT_IN_FLOOR):
6891 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6892 CASE_FLT_FN (BUILT_IN_RINT):
6893 CASE_FLT_FN (BUILT_IN_ROUND):
6894 CASE_FLT_FN (BUILT_IN_TRUNC):
6895 return true;
6897 CASE_FLT_FN (BUILT_IN_FMIN):
6898 CASE_FLT_FN (BUILT_IN_FMAX):
6899 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6900 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6902 default:
6903 break;
6905 break;
6907 default:
6908 break;
/* Anything unrecognized: conservatively say it need not be
   integral.  */
6910 return false;
6913 /* FNDECL is assumed to be a builtin where truncation can be propagated
6914 across (for instance floor((double)f) == (double)floorf (f).
6915 Do the transformation for a call with argument ARG. */
6917 static tree
6918 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6920 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6922 if (!validate_arg (arg, REAL_TYPE))
6923 return NULL_TREE;
6925 /* Integer rounding functions are idempotent. */
6926 if (fcode == builtin_mathfn_code (arg))
6927 return arg;
6929 /* If argument is already integer valued, and we don't need to worry
6930 about setting errno, there's no need to perform rounding. */
6931 if (! flag_errno_math && integer_valued_real_p (arg))
6932 return arg;
6934 if (optimize)
6936 tree arg0 = strip_float_extensions (arg);
6937 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6938 tree newtype = TREE_TYPE (arg0);
6939 tree decl;
6941 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6942 && (decl = mathfn_built_in (newtype, fcode)))
6943 return fold_convert_loc (loc, ftype,
6944 build_call_expr_loc (loc, decl, 1,
6945 fold_convert_loc (loc,
6946 newtype,
6947 arg0)));
6949 return NULL_TREE;
6952 /* FNDECL is assumed to be builtin which can narrow the FP type of
6953 the argument, for instance lround((double)f) -> lroundf (f).
6954 Do the transformation for a call with argument ARG. */
6956 static tree
6957 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6961 if (!validate_arg (arg, REAL_TYPE))
6962 return NULL_TREE;
6964 /* If argument is already integer valued, and we don't need to worry
6965 about setting errno, there's no need to perform rounding. */
6966 if (! flag_errno_math && integer_valued_real_p (arg))
6967 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6968 TREE_TYPE (TREE_TYPE (fndecl)), arg);
/* Narrow the FP argument: lround ((double) f) -> lroundf (f) when
   ARG is a narrower float widened by extension.  */
6970 if (optimize)
6972 tree ftype = TREE_TYPE (arg);
6973 tree arg0 = strip_float_extensions (arg);
6974 tree newtype = TREE_TYPE (arg0);
6975 tree decl;
6977 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6978 && (decl = mathfn_built_in (newtype, fcode)))
6979 return build_call_expr_loc (loc, decl, 1,
6980 fold_convert_loc (loc, newtype, arg0));
6983 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6984 sizeof (long long) == sizeof (long). */
6985 if (TYPE_PRECISION (long_long_integer_type_node)
6986 == TYPE_PRECISION (long_integer_type_node))
6988 tree newfn = NULL_TREE;
/* Map each ll* rounding built-in to its l* counterpart for ARG's
   float type.  */
6989 switch (fcode)
6991 CASE_FLT_FN (BUILT_IN_LLCEIL):
6992 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6993 break;
6995 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6996 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6997 break;
6999 CASE_FLT_FN (BUILT_IN_LLROUND):
7000 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7001 break;
7003 CASE_FLT_FN (BUILT_IN_LLRINT):
7004 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7005 break;
7007 default:
7008 break;
/* Build the replacement call and convert its (long) result back to
   the original (long long) return type.  */
7011 if (newfn)
7013 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7014 return fold_convert_loc (loc,
7015 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7019 return NULL_TREE;
7022 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7023 return type. Return NULL_TREE if no simplification can be made. */
7025 static tree
7026 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7028 tree res;
7030 if (!validate_arg (arg, COMPLEX_TYPE)
7031 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7032 return NULL_TREE;
7034 /* Calculate the result when the argument is a constant. */
7035 if (TREE_CODE (arg) == COMPLEX_CST
7036 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7037 type, mpfr_hypot)))
7038 return res;
7040 if (TREE_CODE (arg) == COMPLEX_EXPR)
7042 tree real = TREE_OPERAND (arg, 0);
7043 tree imag = TREE_OPERAND (arg, 1);
7045 /* If either part is zero, cabs is fabs of the other. */
7046 if (real_zerop (real))
7047 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7048 if (real_zerop (imag))
7049 return fold_build1_loc (loc, ABS_EXPR, type, real);
7051 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7052 if (flag_unsafe_math_optimizations
7053 && operand_equal_p (real, imag, OEP_PURE_SAME))
7055 const REAL_VALUE_TYPE sqrt2_trunc
7056 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7057 STRIP_NOPS (real);
7058 return fold_build2_loc (loc, MULT_EXPR, type,
7059 fold_build1_loc (loc, ABS_EXPR, type, real),
7060 build_real (type, sqrt2_trunc));
7064 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7065 if (TREE_CODE (arg) == NEGATE_EXPR
7066 || TREE_CODE (arg) == CONJ_EXPR)
7067 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7069 /* Don't do this when optimizing for size. */
7070 if (flag_unsafe_math_optimizations
7071 && optimize && optimize_function_for_speed_p (cfun))
7073 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7075 if (sqrtfn != NULL_TREE)
7077 tree rpart, ipart, result;
7079 arg = builtin_save_expr (arg);
7081 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7082 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7084 rpart = builtin_save_expr (rpart);
7085 ipart = builtin_save_expr (ipart);
7087 result = fold_build2_loc (loc, PLUS_EXPR, type,
7088 fold_build2_loc (loc, MULT_EXPR, type,
7089 rpart, rpart),
7090 fold_build2_loc (loc, MULT_EXPR, type,
7091 ipart, ipart));
7093 return build_call_expr_loc (loc, sqrtfn, 1, result);
7097 return NULL_TREE;
7100 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7101 complex tree type of the result. If NEG is true, the imaginary
7102 zero is negative. */
7104 static tree
7105 build_complex_cproj (tree type, bool neg)
7107 REAL_VALUE_TYPE rinf, rzero = dconst0;
7109 real_inf (&rinf);
7110 rzero.sign = neg;
7111 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7112 build_real (TREE_TYPE (type), rzero));
7115 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7116 return type. Return NULL_TREE if no simplification can be made. */
7118 static tree
7119 fold_builtin_cproj (location_t loc, tree arg, tree type)
7121 if (!validate_arg (arg, COMPLEX_TYPE)
7122 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7123 return NULL_TREE;
7125 /* If there are no infinities, return arg. */
7126 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7127 return non_lvalue_loc (loc, arg);
7129 /* Calculate the result when the argument is a constant. */
7130 if (TREE_CODE (arg) == COMPLEX_CST)
7132 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7133 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7135 if (real_isinf (real) || real_isinf (imag))
7136 return build_complex_cproj (type, imag->sign);
7137 else
7138 return arg;
7140 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7142 tree real = TREE_OPERAND (arg, 0);
7143 tree imag = TREE_OPERAND (arg, 1);
7145 STRIP_NOPS (real);
7146 STRIP_NOPS (imag);
7148 /* If the real part is inf and the imag part is known to be
7149 nonnegative, return (inf + 0i). Remember side-effects are
7150 possible in the imag part. */
7151 if (TREE_CODE (real) == REAL_CST
7152 && real_isinf (TREE_REAL_CST_PTR (real))
7153 && tree_expr_nonnegative_p (imag))
7154 return omit_one_operand_loc (loc, type,
7155 build_complex_cproj (type, false),
7156 arg);
7158 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7159 Remember side-effects are possible in the real part. */
7160 if (TREE_CODE (imag) == REAL_CST
7161 && real_isinf (TREE_REAL_CST_PTR (imag)))
7162 return
7163 omit_one_operand_loc (loc, type,
7164 build_complex_cproj (type, TREE_REAL_CST_PTR
7165 (imag)->sign), arg);
7168 return NULL_TREE;
7171 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7172 Return NULL_TREE if no simplification can be made. */
7174 static tree
7175 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7178 enum built_in_function fcode;
7179 tree res;
7181 if (!validate_arg (arg, REAL_TYPE))
7182 return NULL_TREE;
7184 /* Calculate the result when the argument is a constant. */
7185 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7186 return res;
7188 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7189 fcode = builtin_mathfn_code (arg);
7190 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7192 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7193 arg = fold_build2_loc (loc, MULT_EXPR, type,
7194 CALL_EXPR_ARG (arg, 0),
7195 build_real (type, dconsthalf));
7196 return build_call_expr_loc (loc, expfn, 1, arg);
7199 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7200 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7202 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7204 if (powfn)
7206 tree arg0 = CALL_EXPR_ARG (arg, 0);
7207 tree tree_root;
7208 /* The inner root was either sqrt or cbrt. */
7209 /* This was a conditional expression but it triggered a bug
7210 in Sun C 5.5. */
7211 REAL_VALUE_TYPE dconstroot;
7212 if (BUILTIN_SQRT_P (fcode))
7213 dconstroot = dconsthalf;
7214 else
7215 dconstroot = dconst_third ();
7217 /* Adjust for the outer root. */
7218 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7219 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7220 tree_root = build_real (type, dconstroot);
7221 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7225 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7226 if (flag_unsafe_math_optimizations
7227 && (fcode == BUILT_IN_POW
7228 || fcode == BUILT_IN_POWF
7229 || fcode == BUILT_IN_POWL))
7231 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7232 tree arg0 = CALL_EXPR_ARG (arg, 0);
7233 tree arg1 = CALL_EXPR_ARG (arg, 1);
7234 tree narg1;
7235 if (!tree_expr_nonnegative_p (arg0))
7236 arg0 = build1 (ABS_EXPR, type, arg0);
7237 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7238 build_real (type, dconsthalf));
7239 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7242 return NULL_TREE;
7245 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7246 Return NULL_TREE if no simplification can be made. */
7248 static tree
7249 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7251 const enum built_in_function fcode = builtin_mathfn_code (arg);
7252 tree res;
7254 if (!validate_arg (arg, REAL_TYPE))
7255 return NULL_TREE;
7257 /* Calculate the result when the argument is a constant. */
7258 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7259 return res;
7261 if (flag_unsafe_math_optimizations)
7263 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7264 if (BUILTIN_EXPONENT_P (fcode))
7266 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7267 const REAL_VALUE_TYPE third_trunc =
7268 real_value_truncate (TYPE_MODE (type), dconst_third ());
7269 arg = fold_build2_loc (loc, MULT_EXPR, type,
7270 CALL_EXPR_ARG (arg, 0),
7271 build_real (type, third_trunc));
7272 return build_call_expr_loc (loc, expfn, 1, arg);
7275 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7276 if (BUILTIN_SQRT_P (fcode))
7278 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7280 if (powfn)
7282 tree arg0 = CALL_EXPR_ARG (arg, 0);
7283 tree tree_root;
7284 REAL_VALUE_TYPE dconstroot = dconst_third ();
7286 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7287 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7288 tree_root = build_real (type, dconstroot);
7289 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7293 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7294 if (BUILTIN_CBRT_P (fcode))
7296 tree arg0 = CALL_EXPR_ARG (arg, 0);
7297 if (tree_expr_nonnegative_p (arg0))
7299 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7301 if (powfn)
7303 tree tree_root;
7304 REAL_VALUE_TYPE dconstroot;
7306 real_arithmetic (&dconstroot, MULT_EXPR,
7307 dconst_third_ptr (), dconst_third_ptr ());
7308 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7309 tree_root = build_real (type, dconstroot);
7310 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7315 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7316 if (fcode == BUILT_IN_POW
7317 || fcode == BUILT_IN_POWF
7318 || fcode == BUILT_IN_POWL)
7320 tree arg00 = CALL_EXPR_ARG (arg, 0);
7321 tree arg01 = CALL_EXPR_ARG (arg, 1);
7322 if (tree_expr_nonnegative_p (arg00))
7324 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7325 const REAL_VALUE_TYPE dconstroot
7326 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7327 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7328 build_real (type, dconstroot));
7329 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7333 return NULL_TREE;
7336 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7337 TYPE is the type of the return value. Return NULL_TREE if no
7338 simplification can be made. */
7340 static tree
7341 fold_builtin_cos (location_t loc,
7342 tree arg, tree type, tree fndecl)
7344 tree res, narg;
7346 if (!validate_arg (arg, REAL_TYPE))
7347 return NULL_TREE;
7349 /* Calculate the result when the argument is a constant. */
7350 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7351 return res;
7353 /* Optimize cos(-x) into cos (x). */
7354 if ((narg = fold_strip_sign_ops (arg)))
7355 return build_call_expr_loc (loc, fndecl, 1, narg);
7357 return NULL_TREE;
7360 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7361 Return NULL_TREE if no simplification can be made. */
7363 static tree
7364 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7366 if (validate_arg (arg, REAL_TYPE))
7368 tree res, narg;
7370 /* Calculate the result when the argument is a constant. */
7371 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7372 return res;
7374 /* Optimize cosh(-x) into cosh (x). */
7375 if ((narg = fold_strip_sign_ops (arg)))
7376 return build_call_expr_loc (loc, fndecl, 1, narg);
7379 return NULL_TREE;
7382 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7383 argument ARG. TYPE is the type of the return value. Return
7384 NULL_TREE if no simplification can be made. */
7386 static tree
7387 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7388 bool hyper)
7390 if (validate_arg (arg, COMPLEX_TYPE)
7391 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7393 tree tmp;
7395 /* Calculate the result when the argument is a constant. */
7396 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7397 return tmp;
7399 /* Optimize fn(-x) into fn(x). */
7400 if ((tmp = fold_strip_sign_ops (arg)))
7401 return build_call_expr_loc (loc, fndecl, 1, tmp);
7404 return NULL_TREE;
7407 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7408 Return NULL_TREE if no simplification can be made. */
7410 static tree
7411 fold_builtin_tan (tree arg, tree type)
7413 enum built_in_function fcode;
7414 tree res;
7416 if (!validate_arg (arg, REAL_TYPE))
7417 return NULL_TREE;
7419 /* Calculate the result when the argument is a constant. */
7420 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7421 return res;
7423 /* Optimize tan(atan(x)) = x. */
7424 fcode = builtin_mathfn_code (arg);
7425 if (flag_unsafe_math_optimizations
7426 && (fcode == BUILT_IN_ATAN
7427 || fcode == BUILT_IN_ATANF
7428 || fcode == BUILT_IN_ATANL))
7429 return CALL_EXPR_ARG (arg, 0);
7431 return NULL_TREE;
7434 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7435 NULL_TREE if no simplification can be made. */
7437 static tree
7438 fold_builtin_sincos (location_t loc,
7439 tree arg0, tree arg1, tree arg2)
7441 tree type;
7442 tree res, fn, call;
7444 if (!validate_arg (arg0, REAL_TYPE)
7445 || !validate_arg (arg1, POINTER_TYPE)
7446 || !validate_arg (arg2, POINTER_TYPE))
7447 return NULL_TREE;
7449 type = TREE_TYPE (arg0);
7451 /* Calculate the result when the argument is a constant. */
7452 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7453 return res;
7455 /* Canonicalize sincos to cexpi. */
7456 if (!TARGET_C99_FUNCTIONS)
7457 return NULL_TREE;
7458 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7459 if (!fn)
7460 return NULL_TREE;
7462 call = build_call_expr_loc (loc, fn, 1, arg0);
7463 call = builtin_save_expr (call);
7465 return build2 (COMPOUND_EXPR, void_type_node,
7466 build2 (MODIFY_EXPR, void_type_node,
7467 build_fold_indirect_ref_loc (loc, arg1),
7468 build1 (IMAGPART_EXPR, type, call)),
7469 build2 (MODIFY_EXPR, void_type_node,
7470 build_fold_indirect_ref_loc (loc, arg2),
7471 build1 (REALPART_EXPR, type, call)));
7474 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7475 NULL_TREE if no simplification can be made. */
7477 static tree
7478 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7480 tree rtype;
7481 tree realp, imagp, ifn;
7482 tree res;
7484 if (!validate_arg (arg0, COMPLEX_TYPE)
7485 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7486 return NULL_TREE;
7488 /* Calculate the result when the argument is a constant. */
7489 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7490 return res;
7492 rtype = TREE_TYPE (TREE_TYPE (arg0));
7494 /* In case we can figure out the real part of arg0 and it is constant zero
7495 fold to cexpi. */
7496 if (!TARGET_C99_FUNCTIONS)
7497 return NULL_TREE;
7498 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7499 if (!ifn)
7500 return NULL_TREE;
7502 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7503 && real_zerop (realp))
7505 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7506 return build_call_expr_loc (loc, ifn, 1, narg);
7509 /* In case we can easily decompose real and imaginary parts split cexp
7510 to exp (r) * cexpi (i). */
7511 if (flag_unsafe_math_optimizations
7512 && realp)
7514 tree rfn, rcall, icall;
7516 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7517 if (!rfn)
7518 return NULL_TREE;
7520 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7521 if (!imagp)
7522 return NULL_TREE;
7524 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7525 icall = builtin_save_expr (icall);
7526 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7527 rcall = builtin_save_expr (rcall);
7528 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7529 fold_build2_loc (loc, MULT_EXPR, rtype,
7530 rcall,
7531 fold_build1_loc (loc, REALPART_EXPR,
7532 rtype, icall)),
7533 fold_build2_loc (loc, MULT_EXPR, rtype,
7534 rcall,
7535 fold_build1_loc (loc, IMAGPART_EXPR,
7536 rtype, icall)));
7539 return NULL_TREE;
7542 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7543 Return NULL_TREE if no simplification can be made. */
7545 static tree
7546 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7548 if (!validate_arg (arg, REAL_TYPE))
7549 return NULL_TREE;
7551 /* Optimize trunc of constant value. */
7552 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7554 REAL_VALUE_TYPE r, x;
7555 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7557 x = TREE_REAL_CST (arg);
7558 real_trunc (&r, TYPE_MODE (type), &x);
7559 return build_real (type, r);
7562 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7565 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7566 Return NULL_TREE if no simplification can be made. */
7568 static tree
7569 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7571 if (!validate_arg (arg, REAL_TYPE))
7572 return NULL_TREE;
7574 /* Optimize floor of constant value. */
7575 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7577 REAL_VALUE_TYPE x;
7579 x = TREE_REAL_CST (arg);
7580 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7582 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7583 REAL_VALUE_TYPE r;
7585 real_floor (&r, TYPE_MODE (type), &x);
7586 return build_real (type, r);
7590 /* Fold floor (x) where x is nonnegative to trunc (x). */
7591 if (tree_expr_nonnegative_p (arg))
7593 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7594 if (truncfn)
7595 return build_call_expr_loc (loc, truncfn, 1, arg);
7598 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7601 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7602 Return NULL_TREE if no simplification can be made. */
7604 static tree
7605 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7607 if (!validate_arg (arg, REAL_TYPE))
7608 return NULL_TREE;
7610 /* Optimize ceil of constant value. */
7611 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7613 REAL_VALUE_TYPE x;
7615 x = TREE_REAL_CST (arg);
7616 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7618 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7619 REAL_VALUE_TYPE r;
7621 real_ceil (&r, TYPE_MODE (type), &x);
7622 return build_real (type, r);
7626 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7629 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7630 Return NULL_TREE if no simplification can be made. */
7632 static tree
7633 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7635 if (!validate_arg (arg, REAL_TYPE))
7636 return NULL_TREE;
7638 /* Optimize round of constant value. */
7639 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7641 REAL_VALUE_TYPE x;
7643 x = TREE_REAL_CST (arg);
7644 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7646 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7647 REAL_VALUE_TYPE r;
7649 real_round (&r, TYPE_MODE (type), &x);
7650 return build_real (type, r);
7654 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7657 /* Fold function call to builtin lround, lroundf or lroundl (or the
7658 corresponding long long versions) and other rounding functions. ARG
7659 is the argument to the call. Return NULL_TREE if no simplification
7660 can be made. */
7662 static tree
7663 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7665 if (!validate_arg (arg, REAL_TYPE))
7666 return NULL_TREE;
7668 /* Optimize lround of constant value. */
7669 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7671 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7673 if (real_isfinite (&x))
7675 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7676 tree ftype = TREE_TYPE (arg);
7677 double_int val;
7678 REAL_VALUE_TYPE r;
7680 switch (DECL_FUNCTION_CODE (fndecl))
7682 CASE_FLT_FN (BUILT_IN_LFLOOR):
7683 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7684 real_floor (&r, TYPE_MODE (ftype), &x);
7685 break;
7687 CASE_FLT_FN (BUILT_IN_LCEIL):
7688 CASE_FLT_FN (BUILT_IN_LLCEIL):
7689 real_ceil (&r, TYPE_MODE (ftype), &x);
7690 break;
7692 CASE_FLT_FN (BUILT_IN_LROUND):
7693 CASE_FLT_FN (BUILT_IN_LLROUND):
7694 real_round (&r, TYPE_MODE (ftype), &x);
7695 break;
7697 default:
7698 gcc_unreachable ();
7701 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7702 if (double_int_fits_to_tree_p (itype, val))
7703 return double_int_to_tree (itype, val);
7707 switch (DECL_FUNCTION_CODE (fndecl))
7709 CASE_FLT_FN (BUILT_IN_LFLOOR):
7710 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7711 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7712 if (tree_expr_nonnegative_p (arg))
7713 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7714 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7715 break;
7716 default:;
7719 return fold_fixed_mathfn (loc, fndecl, arg);
7722 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7723 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7724 the argument to the call. Return NULL_TREE if no simplification can
7725 be made. */
7727 static tree
7728 fold_builtin_bitop (tree fndecl, tree arg)
7730 if (!validate_arg (arg, INTEGER_TYPE))
7731 return NULL_TREE;
7733 /* Optimize for constant argument. */
7734 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7736 HOST_WIDE_INT hi, width, result;
7737 unsigned HOST_WIDE_INT lo;
7738 tree type;
7740 type = TREE_TYPE (arg);
7741 width = TYPE_PRECISION (type);
7742 lo = TREE_INT_CST_LOW (arg);
7744 /* Clear all the bits that are beyond the type's precision. */
7745 if (width > HOST_BITS_PER_WIDE_INT)
7747 hi = TREE_INT_CST_HIGH (arg);
7748 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7749 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7751 else
7753 hi = 0;
7754 if (width < HOST_BITS_PER_WIDE_INT)
7755 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7758 switch (DECL_FUNCTION_CODE (fndecl))
7760 CASE_INT_FN (BUILT_IN_FFS):
7761 if (lo != 0)
7762 result = ffs_hwi (lo);
7763 else if (hi != 0)
7764 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7765 else
7766 result = 0;
7767 break;
7769 CASE_INT_FN (BUILT_IN_CLZ):
7770 if (hi != 0)
7771 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7772 else if (lo != 0)
7773 result = width - floor_log2 (lo) - 1;
7774 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7775 result = width;
7776 break;
7778 CASE_INT_FN (BUILT_IN_CTZ):
7779 if (lo != 0)
7780 result = ctz_hwi (lo);
7781 else if (hi != 0)
7782 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7783 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7784 result = width;
7785 break;
7787 CASE_INT_FN (BUILT_IN_POPCOUNT):
7788 result = 0;
7789 while (lo)
7790 result++, lo &= lo - 1;
7791 while (hi)
7792 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7793 break;
7795 CASE_INT_FN (BUILT_IN_PARITY):
7796 result = 0;
7797 while (lo)
7798 result++, lo &= lo - 1;
7799 while (hi)
7800 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7801 result &= 1;
7802 break;
7804 default:
7805 gcc_unreachable ();
7808 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7811 return NULL_TREE;
7814 /* Fold function call to builtin_bswap and the long and long long
7815 variants. Return NULL_TREE if no simplification can be made. */
7816 static tree
7817 fold_builtin_bswap (tree fndecl, tree arg)
7819 if (! validate_arg (arg, INTEGER_TYPE))
7820 return NULL_TREE;
7822 /* Optimize constant value. */
7823 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7825 HOST_WIDE_INT hi, width, r_hi = 0;
7826 unsigned HOST_WIDE_INT lo, r_lo = 0;
7827 tree type;
7829 type = TREE_TYPE (arg);
7830 width = TYPE_PRECISION (type);
7831 lo = TREE_INT_CST_LOW (arg);
7832 hi = TREE_INT_CST_HIGH (arg);
7834 switch (DECL_FUNCTION_CODE (fndecl))
7836 case BUILT_IN_BSWAP32:
7837 case BUILT_IN_BSWAP64:
7839 int s;
7841 for (s = 0; s < width; s += 8)
7843 int d = width - s - 8;
7844 unsigned HOST_WIDE_INT byte;
7846 if (s < HOST_BITS_PER_WIDE_INT)
7847 byte = (lo >> s) & 0xff;
7848 else
7849 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7851 if (d < HOST_BITS_PER_WIDE_INT)
7852 r_lo |= byte << d;
7853 else
7854 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7858 break;
7860 default:
7861 gcc_unreachable ();
7864 if (width < HOST_BITS_PER_WIDE_INT)
7865 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7866 else
7867 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7870 return NULL_TREE;
7873 /* A subroutine of fold_builtin to fold the various logarithmic
7874 functions. Return NULL_TREE if no simplification can me made.
7875 FUNC is the corresponding MPFR logarithm function. */
7877 static tree
7878 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7879 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7881 if (validate_arg (arg, REAL_TYPE))
7883 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7884 tree res;
7885 const enum built_in_function fcode = builtin_mathfn_code (arg);
7887 /* Calculate the result when the argument is a constant. */
7888 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7889 return res;
7891 /* Special case, optimize logN(expN(x)) = x. */
7892 if (flag_unsafe_math_optimizations
7893 && ((func == mpfr_log
7894 && (fcode == BUILT_IN_EXP
7895 || fcode == BUILT_IN_EXPF
7896 || fcode == BUILT_IN_EXPL))
7897 || (func == mpfr_log2
7898 && (fcode == BUILT_IN_EXP2
7899 || fcode == BUILT_IN_EXP2F
7900 || fcode == BUILT_IN_EXP2L))
7901 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7902 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7904 /* Optimize logN(func()) for various exponential functions. We
7905 want to determine the value "x" and the power "exponent" in
7906 order to transform logN(x**exponent) into exponent*logN(x). */
7907 if (flag_unsafe_math_optimizations)
7909 tree exponent = 0, x = 0;
7911 switch (fcode)
7913 CASE_FLT_FN (BUILT_IN_EXP):
7914 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7915 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7916 dconst_e ()));
7917 exponent = CALL_EXPR_ARG (arg, 0);
7918 break;
7919 CASE_FLT_FN (BUILT_IN_EXP2):
7920 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7921 x = build_real (type, dconst2);
7922 exponent = CALL_EXPR_ARG (arg, 0);
7923 break;
7924 CASE_FLT_FN (BUILT_IN_EXP10):
7925 CASE_FLT_FN (BUILT_IN_POW10):
7926 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7928 REAL_VALUE_TYPE dconst10;
7929 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7930 x = build_real (type, dconst10);
7932 exponent = CALL_EXPR_ARG (arg, 0);
7933 break;
7934 CASE_FLT_FN (BUILT_IN_SQRT):
7935 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7936 x = CALL_EXPR_ARG (arg, 0);
7937 exponent = build_real (type, dconsthalf);
7938 break;
7939 CASE_FLT_FN (BUILT_IN_CBRT):
7940 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7941 x = CALL_EXPR_ARG (arg, 0);
7942 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7943 dconst_third ()));
7944 break;
7945 CASE_FLT_FN (BUILT_IN_POW):
7946 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7947 x = CALL_EXPR_ARG (arg, 0);
7948 exponent = CALL_EXPR_ARG (arg, 1);
7949 break;
7950 default:
7951 break;
7954 /* Now perform the optimization. */
7955 if (x && exponent)
7957 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7958 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7963 return NULL_TREE;
7966 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7967 NULL_TREE if no simplification can be made. */
7969 static tree
7970 fold_builtin_hypot (location_t loc, tree fndecl,
7971 tree arg0, tree arg1, tree type)
7973 tree res, narg0, narg1;
7975 if (!validate_arg (arg0, REAL_TYPE)
7976 || !validate_arg (arg1, REAL_TYPE))
7977 return NULL_TREE;
7979 /* Calculate the result when the argument is a constant. */
7980 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7981 return res;
7983 /* If either argument to hypot has a negate or abs, strip that off.
7984 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7985 narg0 = fold_strip_sign_ops (arg0);
7986 narg1 = fold_strip_sign_ops (arg1);
7987 if (narg0 || narg1)
7989 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7990 narg1 ? narg1 : arg1);
7993 /* If either argument is zero, hypot is fabs of the other. */
7994 if (real_zerop (arg0))
7995 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7996 else if (real_zerop (arg1))
7997 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7999 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8000 if (flag_unsafe_math_optimizations
8001 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8003 const REAL_VALUE_TYPE sqrt2_trunc
8004 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8005 return fold_build2_loc (loc, MULT_EXPR, type,
8006 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8007 build_real (type, sqrt2_trunc));
8010 return NULL_TREE;
8014 /* Fold a builtin function call to pow, powf, or powl. Return
8015 NULL_TREE if no simplification can be made. */
8016 static tree
8017 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8019 tree res;
8021 if (!validate_arg (arg0, REAL_TYPE)
8022 || !validate_arg (arg1, REAL_TYPE))
8023 return NULL_TREE;
8025 /* Calculate the result when the argument is a constant. */
8026 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8027 return res;
8029 /* Optimize pow(1.0,y) = 1.0. */
8030 if (real_onep (arg0))
8031 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8033 if (TREE_CODE (arg1) == REAL_CST
8034 && !TREE_OVERFLOW (arg1))
8036 REAL_VALUE_TYPE cint;
8037 REAL_VALUE_TYPE c;
8038 HOST_WIDE_INT n;
8040 c = TREE_REAL_CST (arg1);
8042 /* Optimize pow(x,0.0) = 1.0. */
8043 if (REAL_VALUES_EQUAL (c, dconst0))
8044 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8045 arg0);
8047 /* Optimize pow(x,1.0) = x. */
8048 if (REAL_VALUES_EQUAL (c, dconst1))
8049 return arg0;
8051 /* Optimize pow(x,-1.0) = 1.0/x. */
8052 if (REAL_VALUES_EQUAL (c, dconstm1))
8053 return fold_build2_loc (loc, RDIV_EXPR, type,
8054 build_real (type, dconst1), arg0);
8056 /* Optimize pow(x,0.5) = sqrt(x). */
8057 if (flag_unsafe_math_optimizations
8058 && REAL_VALUES_EQUAL (c, dconsthalf))
8060 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8062 if (sqrtfn != NULL_TREE)
8063 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8066 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8067 if (flag_unsafe_math_optimizations)
8069 const REAL_VALUE_TYPE dconstroot
8070 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8072 if (REAL_VALUES_EQUAL (c, dconstroot))
8074 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8075 if (cbrtfn != NULL_TREE)
8076 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8080 /* Check for an integer exponent. */
8081 n = real_to_integer (&c);
8082 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8083 if (real_identical (&c, &cint))
8085 /* Attempt to evaluate pow at compile-time, unless this should
8086 raise an exception. */
8087 if (TREE_CODE (arg0) == REAL_CST
8088 && !TREE_OVERFLOW (arg0)
8089 && (n > 0
8090 || (!flag_trapping_math && !flag_errno_math)
8091 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8093 REAL_VALUE_TYPE x;
8094 bool inexact;
8096 x = TREE_REAL_CST (arg0);
8097 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8098 if (flag_unsafe_math_optimizations || !inexact)
8099 return build_real (type, x);
8102 /* Strip sign ops from even integer powers. */
8103 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8105 tree narg0 = fold_strip_sign_ops (arg0);
8106 if (narg0)
8107 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8112 if (flag_unsafe_math_optimizations)
8114 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8116 /* Optimize pow(expN(x),y) = expN(x*y). */
8117 if (BUILTIN_EXPONENT_P (fcode))
8119 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8120 tree arg = CALL_EXPR_ARG (arg0, 0);
8121 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8122 return build_call_expr_loc (loc, expfn, 1, arg);
8125 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8126 if (BUILTIN_SQRT_P (fcode))
8128 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8129 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8130 build_real (type, dconsthalf));
8131 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8134 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8135 if (BUILTIN_CBRT_P (fcode))
8137 tree arg = CALL_EXPR_ARG (arg0, 0);
8138 if (tree_expr_nonnegative_p (arg))
8140 const REAL_VALUE_TYPE dconstroot
8141 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8142 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8143 build_real (type, dconstroot));
8144 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8148 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8149 if (fcode == BUILT_IN_POW
8150 || fcode == BUILT_IN_POWF
8151 || fcode == BUILT_IN_POWL)
8153 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8154 if (tree_expr_nonnegative_p (arg00))
8156 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8157 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8158 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8163 return NULL_TREE;
8166 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8167 Return NULL_TREE if no simplification can be made. */
8168 static tree
8169 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8170 tree arg0, tree arg1, tree type)
8172 if (!validate_arg (arg0, REAL_TYPE)
8173 || !validate_arg (arg1, INTEGER_TYPE))
8174 return NULL_TREE;
8176 /* Optimize pow(1.0,y) = 1.0. */
8177 if (real_onep (arg0))
8178 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8180 if (host_integerp (arg1, 0))
8182 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8184 /* Evaluate powi at compile-time. */
8185 if (TREE_CODE (arg0) == REAL_CST
8186 && !TREE_OVERFLOW (arg0))
8188 REAL_VALUE_TYPE x;
8189 x = TREE_REAL_CST (arg0);
8190 real_powi (&x, TYPE_MODE (type), &x, c);
8191 return build_real (type, x);
8194 /* Optimize pow(x,0) = 1.0. */
8195 if (c == 0)
8196 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8197 arg0);
8199 /* Optimize pow(x,1) = x. */
8200 if (c == 1)
8201 return arg0;
8203 /* Optimize pow(x,-1) = 1.0/x. */
8204 if (c == -1)
8205 return fold_build2_loc (loc, RDIV_EXPR, type,
8206 build_real (type, dconst1), arg0);
8209 return NULL_TREE;
8212 /* A subroutine of fold_builtin to fold the various exponent
8213 functions. Return NULL_TREE if no simplification can be made.
8214 FUNC is the corresponding MPFR exponent function. */
8216 static tree
8217 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8218 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8220 if (validate_arg (arg, REAL_TYPE))
8222 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8223 tree res;
8225 /* Calculate the result when the argument is a constant. */
8226 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8227 return res;
8229 /* Optimize expN(logN(x)) = x. */
8230 if (flag_unsafe_math_optimizations)
8232 const enum built_in_function fcode = builtin_mathfn_code (arg);
8234 if ((func == mpfr_exp
8235 && (fcode == BUILT_IN_LOG
8236 || fcode == BUILT_IN_LOGF
8237 || fcode == BUILT_IN_LOGL))
8238 || (func == mpfr_exp2
8239 && (fcode == BUILT_IN_LOG2
8240 || fcode == BUILT_IN_LOG2F
8241 || fcode == BUILT_IN_LOG2L))
8242 || (func == mpfr_exp10
8243 && (fcode == BUILT_IN_LOG10
8244 || fcode == BUILT_IN_LOG10F
8245 || fcode == BUILT_IN_LOG10L)))
8246 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8250 return NULL_TREE;
8253 /* Return true if VAR is a VAR_DECL or a component thereof. */
8255 static bool
8256 var_decl_component_p (tree var)
8258 tree inner = var;
8259 while (handled_component_p (inner))
8260 inner = TREE_OPERAND (inner, 0);
8261 return SSA_VAR_P (inner);
8264 /* Fold function call to builtin memset. Return
8265 NULL_TREE if no simplification can be made. */
8267 static tree
8268 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8269 tree type, bool ignore)
8271 tree var, ret, etype;
8272 unsigned HOST_WIDE_INT length, cval;
8274 if (! validate_arg (dest, POINTER_TYPE)
8275 || ! validate_arg (c, INTEGER_TYPE)
8276 || ! validate_arg (len, INTEGER_TYPE))
8277 return NULL_TREE;
8279 if (! host_integerp (len, 1))
8280 return NULL_TREE;
8282 /* If the LEN parameter is zero, return DEST. */
8283 if (integer_zerop (len))
8284 return omit_one_operand_loc (loc, type, dest, c);
8286 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8287 return NULL_TREE;
8289 var = dest;
8290 STRIP_NOPS (var);
8291 if (TREE_CODE (var) != ADDR_EXPR)
8292 return NULL_TREE;
8294 var = TREE_OPERAND (var, 0);
8295 if (TREE_THIS_VOLATILE (var))
8296 return NULL_TREE;
8298 etype = TREE_TYPE (var);
8299 if (TREE_CODE (etype) == ARRAY_TYPE)
8300 etype = TREE_TYPE (etype);
8302 if (!INTEGRAL_TYPE_P (etype)
8303 && !POINTER_TYPE_P (etype))
8304 return NULL_TREE;
8306 if (! var_decl_component_p (var))
8307 return NULL_TREE;
8309 length = tree_low_cst (len, 1);
8310 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8311 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8312 < length)
8313 return NULL_TREE;
8315 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8316 return NULL_TREE;
8318 if (integer_zerop (c))
8319 cval = 0;
8320 else
8322 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8323 return NULL_TREE;
8325 cval = TREE_INT_CST_LOW (c);
8326 cval &= 0xff;
8327 cval |= cval << 8;
8328 cval |= cval << 16;
8329 cval |= (cval << 31) << 1;
8332 ret = build_int_cst_type (etype, cval);
8333 var = build_fold_indirect_ref_loc (loc,
8334 fold_convert_loc (loc,
8335 build_pointer_type (etype),
8336 dest));
8337 ret = build2 (MODIFY_EXPR, etype, var, ret);
8338 if (ignore)
8339 return ret;
8341 return omit_one_operand_loc (loc, type, dest, ret);
8344 /* Fold function call to builtin memset. Return
8345 NULL_TREE if no simplification can be made. */
8347 static tree
8348 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8350 if (! validate_arg (dest, POINTER_TYPE)
8351 || ! validate_arg (size, INTEGER_TYPE))
8352 return NULL_TREE;
8354 if (!ignore)
8355 return NULL_TREE;
8357 /* New argument list transforming bzero(ptr x, int y) to
8358 memset(ptr x, int 0, size_t y). This is done this way
8359 so that if it isn't expanded inline, we fallback to
8360 calling bzero instead of memset. */
8362 return fold_builtin_memset (loc, dest, integer_zero_node,
8363 fold_convert_loc (loc, sizetype, size),
8364 void_type_node, ignore);
8367 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8368 NULL_TREE if no simplification can be made.
8369 If ENDP is 0, return DEST (like memcpy).
8370 If ENDP is 1, return DEST+LEN (like mempcpy).
8371 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8372 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8373 (memmove). */
8375 static tree
8376 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8377 tree len, tree type, bool ignore, int endp)
8379 tree destvar, srcvar, expr;
8381 if (! validate_arg (dest, POINTER_TYPE)
8382 || ! validate_arg (src, POINTER_TYPE)
8383 || ! validate_arg (len, INTEGER_TYPE))
8384 return NULL_TREE;
8386 /* If the LEN parameter is zero, return DEST. */
8387 if (integer_zerop (len))
8388 return omit_one_operand_loc (loc, type, dest, src);
8390 /* If SRC and DEST are the same (and not volatile), return
8391 DEST{,+LEN,+LEN-1}. */
8392 if (operand_equal_p (src, dest, 0))
8393 expr = len;
8394 else
8396 tree srctype, desttype;
8397 unsigned int src_align, dest_align;
8398 tree off0;
8400 if (endp == 3)
8402 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8403 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8405 /* Both DEST and SRC must be pointer types.
8406 ??? This is what old code did. Is the testing for pointer types
8407 really mandatory?
8409 If either SRC is readonly or length is 1, we can use memcpy. */
8410 if (!dest_align || !src_align)
8411 return NULL_TREE;
8412 if (readonly_data_expr (src)
8413 || (host_integerp (len, 1)
8414 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8415 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8417 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8418 if (!fn)
8419 return NULL_TREE;
8420 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8423 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8424 if (TREE_CODE (src) == ADDR_EXPR
8425 && TREE_CODE (dest) == ADDR_EXPR)
8427 tree src_base, dest_base, fn;
8428 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8429 HOST_WIDE_INT size = -1;
8430 HOST_WIDE_INT maxsize = -1;
8432 srcvar = TREE_OPERAND (src, 0);
8433 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8434 &size, &maxsize);
8435 destvar = TREE_OPERAND (dest, 0);
8436 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8437 &size, &maxsize);
8438 if (host_integerp (len, 1))
8439 maxsize = tree_low_cst (len, 1);
8440 else
8441 maxsize = -1;
8442 src_offset /= BITS_PER_UNIT;
8443 dest_offset /= BITS_PER_UNIT;
8444 if (SSA_VAR_P (src_base)
8445 && SSA_VAR_P (dest_base))
8447 if (operand_equal_p (src_base, dest_base, 0)
8448 && ranges_overlap_p (src_offset, maxsize,
8449 dest_offset, maxsize))
8450 return NULL_TREE;
8452 else if (TREE_CODE (src_base) == MEM_REF
8453 && TREE_CODE (dest_base) == MEM_REF)
8455 double_int off;
8456 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8457 TREE_OPERAND (dest_base, 0), 0))
8458 return NULL_TREE;
8459 off = double_int_add (mem_ref_offset (src_base),
8460 shwi_to_double_int (src_offset));
8461 if (!double_int_fits_in_shwi_p (off))
8462 return NULL_TREE;
8463 src_offset = off.low;
8464 off = double_int_add (mem_ref_offset (dest_base),
8465 shwi_to_double_int (dest_offset));
8466 if (!double_int_fits_in_shwi_p (off))
8467 return NULL_TREE;
8468 dest_offset = off.low;
8469 if (ranges_overlap_p (src_offset, maxsize,
8470 dest_offset, maxsize))
8471 return NULL_TREE;
8473 else
8474 return NULL_TREE;
8476 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8477 if (!fn)
8478 return NULL_TREE;
8479 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8482 /* If the destination and source do not alias optimize into
8483 memcpy as well. */
8484 if ((is_gimple_min_invariant (dest)
8485 || TREE_CODE (dest) == SSA_NAME)
8486 && (is_gimple_min_invariant (src)
8487 || TREE_CODE (src) == SSA_NAME))
8489 ao_ref destr, srcr;
8490 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8491 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8492 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8494 tree fn;
8495 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8496 if (!fn)
8497 return NULL_TREE;
8498 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8502 return NULL_TREE;
8505 if (!host_integerp (len, 0))
8506 return NULL_TREE;
8507 /* FIXME:
8508 This logic lose for arguments like (type *)malloc (sizeof (type)),
8509 since we strip the casts of up to VOID return value from malloc.
8510 Perhaps we ought to inherit type from non-VOID argument here? */
8511 STRIP_NOPS (src);
8512 STRIP_NOPS (dest);
8513 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8514 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8516 tree tem = TREE_OPERAND (src, 0);
8517 STRIP_NOPS (tem);
8518 if (tem != TREE_OPERAND (src, 0))
8519 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8521 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8523 tree tem = TREE_OPERAND (dest, 0);
8524 STRIP_NOPS (tem);
8525 if (tem != TREE_OPERAND (dest, 0))
8526 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8528 srctype = TREE_TYPE (TREE_TYPE (src));
8529 if (srctype
8530 && TREE_CODE (srctype) == ARRAY_TYPE
8531 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8533 srctype = TREE_TYPE (srctype);
8534 STRIP_NOPS (src);
8535 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8537 desttype = TREE_TYPE (TREE_TYPE (dest));
8538 if (desttype
8539 && TREE_CODE (desttype) == ARRAY_TYPE
8540 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8542 desttype = TREE_TYPE (desttype);
8543 STRIP_NOPS (dest);
8544 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8546 if (!srctype || !desttype
8547 || TREE_ADDRESSABLE (srctype)
8548 || TREE_ADDRESSABLE (desttype)
8549 || !TYPE_SIZE_UNIT (srctype)
8550 || !TYPE_SIZE_UNIT (desttype)
8551 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8552 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
8553 return NULL_TREE;
8555 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8556 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8557 if (dest_align < TYPE_ALIGN (desttype)
8558 || src_align < TYPE_ALIGN (srctype))
8559 return NULL_TREE;
8561 if (!ignore)
8562 dest = builtin_save_expr (dest);
8564 /* Build accesses at offset zero with a ref-all character type. */
8565 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8566 ptr_mode, true), 0);
8568 destvar = dest;
8569 STRIP_NOPS (destvar);
8570 if (TREE_CODE (destvar) == ADDR_EXPR
8571 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8572 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8573 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8574 else
8575 destvar = NULL_TREE;
8577 srcvar = src;
8578 STRIP_NOPS (srcvar);
8579 if (TREE_CODE (srcvar) == ADDR_EXPR
8580 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8581 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8583 if (!destvar
8584 || src_align >= TYPE_ALIGN (desttype))
8585 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8586 srcvar, off0);
8587 else if (!STRICT_ALIGNMENT)
8589 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8590 src_align);
8591 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8593 else
8594 srcvar = NULL_TREE;
8596 else
8597 srcvar = NULL_TREE;
8599 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8600 return NULL_TREE;
8602 if (srcvar == NULL_TREE)
8604 STRIP_NOPS (src);
8605 if (src_align >= TYPE_ALIGN (desttype))
8606 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8607 else
8609 if (STRICT_ALIGNMENT)
8610 return NULL_TREE;
8611 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8612 src_align);
8613 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8616 else if (destvar == NULL_TREE)
8618 STRIP_NOPS (dest);
8619 if (dest_align >= TYPE_ALIGN (srctype))
8620 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8621 else
8623 if (STRICT_ALIGNMENT)
8624 return NULL_TREE;
8625 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8626 dest_align);
8627 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8631 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8634 if (ignore)
8635 return expr;
8637 if (endp == 0 || endp == 3)
8638 return omit_one_operand_loc (loc, type, dest, expr);
8640 if (expr == len)
8641 expr = NULL_TREE;
8643 if (endp == 2)
8644 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8645 ssize_int (1));
8647 len = fold_convert_loc (loc, sizetype, len);
8648 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8649 dest = fold_convert_loc (loc, type, dest);
8650 if (expr)
8651 dest = omit_one_operand_loc (loc, type, dest, expr);
8652 return dest;
8655 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8656 If LEN is not NULL, it represents the length of the string to be
8657 copied. Return NULL_TREE if no simplification can be made. */
8659 tree
8660 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8662 tree fn;
8664 if (!validate_arg (dest, POINTER_TYPE)
8665 || !validate_arg (src, POINTER_TYPE))
8666 return NULL_TREE;
8668 /* If SRC and DEST are the same (and not volatile), return DEST. */
8669 if (operand_equal_p (src, dest, 0))
8670 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8672 if (optimize_function_for_size_p (cfun))
8673 return NULL_TREE;
8675 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8676 if (!fn)
8677 return NULL_TREE;
8679 if (!len)
8681 len = c_strlen (src, 1);
8682 if (! len || TREE_SIDE_EFFECTS (len))
8683 return NULL_TREE;
8686 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8687 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8688 build_call_expr_loc (loc, fn, 3, dest, src, len));
8691 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8692 Return NULL_TREE if no simplification can be made. */
8694 static tree
8695 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8697 tree fn, len, lenp1, call, type;
8699 if (!validate_arg (dest, POINTER_TYPE)
8700 || !validate_arg (src, POINTER_TYPE))
8701 return NULL_TREE;
8703 len = c_strlen (src, 1);
8704 if (!len
8705 || TREE_CODE (len) != INTEGER_CST)
8706 return NULL_TREE;
8708 if (optimize_function_for_size_p (cfun)
8709 /* If length is zero it's small enough. */
8710 && !integer_zerop (len))
8711 return NULL_TREE;
8713 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8714 if (!fn)
8715 return NULL_TREE;
8717 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8718 /* We use dest twice in building our expression. Save it from
8719 multiple expansions. */
8720 dest = builtin_save_expr (dest);
8721 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8723 type = TREE_TYPE (TREE_TYPE (fndecl));
8724 len = fold_convert_loc (loc, sizetype, len);
8725 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8726 dest = fold_convert_loc (loc, type, dest);
8727 dest = omit_one_operand_loc (loc, type, dest, call);
8728 return dest;
8731 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8732 If SLEN is not NULL, it represents the length of the source string.
8733 Return NULL_TREE if no simplification can be made. */
8735 tree
8736 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8737 tree src, tree len, tree slen)
8739 tree fn;
8741 if (!validate_arg (dest, POINTER_TYPE)
8742 || !validate_arg (src, POINTER_TYPE)
8743 || !validate_arg (len, INTEGER_TYPE))
8744 return NULL_TREE;
8746 /* If the LEN parameter is zero, return DEST. */
8747 if (integer_zerop (len))
8748 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8750 /* We can't compare slen with len as constants below if len is not a
8751 constant. */
8752 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8753 return NULL_TREE;
8755 if (!slen)
8756 slen = c_strlen (src, 1);
8758 /* Now, we must be passed a constant src ptr parameter. */
8759 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8760 return NULL_TREE;
8762 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8764 /* We do not support simplification of this case, though we do
8765 support it when expanding trees into RTL. */
8766 /* FIXME: generate a call to __builtin_memset. */
8767 if (tree_int_cst_lt (slen, len))
8768 return NULL_TREE;
8770 /* OK transform into builtin memcpy. */
8771 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8772 if (!fn)
8773 return NULL_TREE;
8774 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8775 build_call_expr_loc (loc, fn, 3, dest, src, len));
8778 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8779 arguments to the call, and TYPE is its return type.
8780 Return NULL_TREE if no simplification can be made. */
8782 static tree
8783 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8785 if (!validate_arg (arg1, POINTER_TYPE)
8786 || !validate_arg (arg2, INTEGER_TYPE)
8787 || !validate_arg (len, INTEGER_TYPE))
8788 return NULL_TREE;
8789 else
8791 const char *p1;
8793 if (TREE_CODE (arg2) != INTEGER_CST
8794 || !host_integerp (len, 1))
8795 return NULL_TREE;
8797 p1 = c_getstr (arg1);
8798 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8800 char c;
8801 const char *r;
8802 tree tem;
8804 if (target_char_cast (arg2, &c))
8805 return NULL_TREE;
8807 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8809 if (r == NULL)
8810 return build_int_cst (TREE_TYPE (arg1), 0);
8812 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8813 size_int (r - p1));
8814 return fold_convert_loc (loc, type, tem);
8816 return NULL_TREE;
8820 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8821 Return NULL_TREE if no simplification can be made. */
8823 static tree
8824 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8826 const char *p1, *p2;
8828 if (!validate_arg (arg1, POINTER_TYPE)
8829 || !validate_arg (arg2, POINTER_TYPE)
8830 || !validate_arg (len, INTEGER_TYPE))
8831 return NULL_TREE;
8833 /* If the LEN parameter is zero, return zero. */
8834 if (integer_zerop (len))
8835 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8836 arg1, arg2);
8838 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8839 if (operand_equal_p (arg1, arg2, 0))
8840 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8842 p1 = c_getstr (arg1);
8843 p2 = c_getstr (arg2);
8845 /* If all arguments are constant, and the value of len is not greater
8846 than the lengths of arg1 and arg2, evaluate at compile-time. */
8847 if (host_integerp (len, 1) && p1 && p2
8848 && compare_tree_int (len, strlen (p1) + 1) <= 0
8849 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8851 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8853 if (r > 0)
8854 return integer_one_node;
8855 else if (r < 0)
8856 return integer_minus_one_node;
8857 else
8858 return integer_zero_node;
8861 /* If len parameter is one, return an expression corresponding to
8862 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8863 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8865 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8866 tree cst_uchar_ptr_node
8867 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8869 tree ind1
8870 = fold_convert_loc (loc, integer_type_node,
8871 build1 (INDIRECT_REF, cst_uchar_node,
8872 fold_convert_loc (loc,
8873 cst_uchar_ptr_node,
8874 arg1)));
8875 tree ind2
8876 = fold_convert_loc (loc, integer_type_node,
8877 build1 (INDIRECT_REF, cst_uchar_node,
8878 fold_convert_loc (loc,
8879 cst_uchar_ptr_node,
8880 arg2)));
8881 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8884 return NULL_TREE;
8887 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8888 Return NULL_TREE if no simplification can be made. */
8890 static tree
8891 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8893 const char *p1, *p2;
8895 if (!validate_arg (arg1, POINTER_TYPE)
8896 || !validate_arg (arg2, POINTER_TYPE))
8897 return NULL_TREE;
8899 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8900 if (operand_equal_p (arg1, arg2, 0))
8901 return integer_zero_node;
8903 p1 = c_getstr (arg1);
8904 p2 = c_getstr (arg2);
8906 if (p1 && p2)
8908 const int i = strcmp (p1, p2);
8909 if (i < 0)
8910 return integer_minus_one_node;
8911 else if (i > 0)
8912 return integer_one_node;
8913 else
8914 return integer_zero_node;
8917 /* If the second arg is "", return *(const unsigned char*)arg1. */
8918 if (p2 && *p2 == '\0')
8920 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8921 tree cst_uchar_ptr_node
8922 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8924 return fold_convert_loc (loc, integer_type_node,
8925 build1 (INDIRECT_REF, cst_uchar_node,
8926 fold_convert_loc (loc,
8927 cst_uchar_ptr_node,
8928 arg1)));
8931 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8932 if (p1 && *p1 == '\0')
8934 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8935 tree cst_uchar_ptr_node
8936 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8938 tree temp
8939 = fold_convert_loc (loc, integer_type_node,
8940 build1 (INDIRECT_REF, cst_uchar_node,
8941 fold_convert_loc (loc,
8942 cst_uchar_ptr_node,
8943 arg2)));
8944 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8947 return NULL_TREE;
8950 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8951 Return NULL_TREE if no simplification can be made. */
8953 static tree
8954 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8956 const char *p1, *p2;
8958 if (!validate_arg (arg1, POINTER_TYPE)
8959 || !validate_arg (arg2, POINTER_TYPE)
8960 || !validate_arg (len, INTEGER_TYPE))
8961 return NULL_TREE;
8963 /* If the LEN parameter is zero, return zero. */
8964 if (integer_zerop (len))
8965 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8966 arg1, arg2);
8968 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8969 if (operand_equal_p (arg1, arg2, 0))
8970 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8972 p1 = c_getstr (arg1);
8973 p2 = c_getstr (arg2);
8975 if (host_integerp (len, 1) && p1 && p2)
8977 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8978 if (i > 0)
8979 return integer_one_node;
8980 else if (i < 0)
8981 return integer_minus_one_node;
8982 else
8983 return integer_zero_node;
8986 /* If the second arg is "", and the length is greater than zero,
8987 return *(const unsigned char*)arg1. */
8988 if (p2 && *p2 == '\0'
8989 && TREE_CODE (len) == INTEGER_CST
8990 && tree_int_cst_sgn (len) == 1)
8992 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8993 tree cst_uchar_ptr_node
8994 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8996 return fold_convert_loc (loc, integer_type_node,
8997 build1 (INDIRECT_REF, cst_uchar_node,
8998 fold_convert_loc (loc,
8999 cst_uchar_ptr_node,
9000 arg1)));
9003 /* If the first arg is "", and the length is greater than zero,
9004 return -*(const unsigned char*)arg2. */
9005 if (p1 && *p1 == '\0'
9006 && TREE_CODE (len) == INTEGER_CST
9007 && tree_int_cst_sgn (len) == 1)
9009 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9010 tree cst_uchar_ptr_node
9011 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9013 tree temp = fold_convert_loc (loc, integer_type_node,
9014 build1 (INDIRECT_REF, cst_uchar_node,
9015 fold_convert_loc (loc,
9016 cst_uchar_ptr_node,
9017 arg2)));
9018 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9021 /* If len parameter is one, return an expression corresponding to
9022 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9023 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9025 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9026 tree cst_uchar_ptr_node
9027 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9029 tree ind1 = fold_convert_loc (loc, integer_type_node,
9030 build1 (INDIRECT_REF, cst_uchar_node,
9031 fold_convert_loc (loc,
9032 cst_uchar_ptr_node,
9033 arg1)));
9034 tree ind2 = fold_convert_loc (loc, integer_type_node,
9035 build1 (INDIRECT_REF, cst_uchar_node,
9036 fold_convert_loc (loc,
9037 cst_uchar_ptr_node,
9038 arg2)));
9039 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9042 return NULL_TREE;
9045 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9046 ARG. Return NULL_TREE if no simplification can be made. */
9048 static tree
9049 fold_builtin_signbit (location_t loc, tree arg, tree type)
9051 if (!validate_arg (arg, REAL_TYPE))
9052 return NULL_TREE;
9054 /* If ARG is a compile-time constant, determine the result. */
9055 if (TREE_CODE (arg) == REAL_CST
9056 && !TREE_OVERFLOW (arg))
9058 REAL_VALUE_TYPE c;
9060 c = TREE_REAL_CST (arg);
9061 return (REAL_VALUE_NEGATIVE (c)
9062 ? build_one_cst (type)
9063 : build_zero_cst (type));
9066 /* If ARG is non-negative, the result is always zero. */
9067 if (tree_expr_nonnegative_p (arg))
9068 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9070 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9071 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9072 return fold_build2_loc (loc, LT_EXPR, type, arg,
9073 build_real (TREE_TYPE (arg), dconst0));
9075 return NULL_TREE;
9078 /* Fold function call to builtin copysign, copysignf or copysignl with
9079 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9080 be made. */
9082 static tree
9083 fold_builtin_copysign (location_t loc, tree fndecl,
9084 tree arg1, tree arg2, tree type)
9086 tree tem;
9088 if (!validate_arg (arg1, REAL_TYPE)
9089 || !validate_arg (arg2, REAL_TYPE))
9090 return NULL_TREE;
9092 /* copysign(X,X) is X. */
9093 if (operand_equal_p (arg1, arg2, 0))
9094 return fold_convert_loc (loc, type, arg1);
9096 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9097 if (TREE_CODE (arg1) == REAL_CST
9098 && TREE_CODE (arg2) == REAL_CST
9099 && !TREE_OVERFLOW (arg1)
9100 && !TREE_OVERFLOW (arg2))
9102 REAL_VALUE_TYPE c1, c2;
9104 c1 = TREE_REAL_CST (arg1);
9105 c2 = TREE_REAL_CST (arg2);
9106 /* c1.sign := c2.sign. */
9107 real_copysign (&c1, &c2);
9108 return build_real (type, c1);
9111 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9112 Remember to evaluate Y for side-effects. */
9113 if (tree_expr_nonnegative_p (arg2))
9114 return omit_one_operand_loc (loc, type,
9115 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9116 arg2);
9118 /* Strip sign changing operations for the first argument. */
9119 tem = fold_strip_sign_ops (arg1);
9120 if (tem)
9121 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9123 return NULL_TREE;
9126 /* Fold a call to builtin isascii with argument ARG. */
9128 static tree
9129 fold_builtin_isascii (location_t loc, tree arg)
9131 if (!validate_arg (arg, INTEGER_TYPE))
9132 return NULL_TREE;
9133 else
9135 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9136 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9137 build_int_cst (NULL_TREE,
9138 ~ (unsigned HOST_WIDE_INT) 0x7f));
9139 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9140 arg, integer_zero_node);
9144 /* Fold a call to builtin toascii with argument ARG. */
9146 static tree
9147 fold_builtin_toascii (location_t loc, tree arg)
9149 if (!validate_arg (arg, INTEGER_TYPE))
9150 return NULL_TREE;
9152 /* Transform toascii(c) -> (c & 0x7f). */
9153 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9154 build_int_cst (NULL_TREE, 0x7f));
9157 /* Fold a call to builtin isdigit with argument ARG. */
9159 static tree
9160 fold_builtin_isdigit (location_t loc, tree arg)
9162 if (!validate_arg (arg, INTEGER_TYPE))
9163 return NULL_TREE;
9164 else
9166 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9167 /* According to the C standard, isdigit is unaffected by locale.
9168 However, it definitely is affected by the target character set. */
9169 unsigned HOST_WIDE_INT target_digit0
9170 = lang_hooks.to_target_charset ('0');
9172 if (target_digit0 == 0)
9173 return NULL_TREE;
9175 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9176 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9177 build_int_cst (unsigned_type_node, target_digit0));
9178 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9179 build_int_cst (unsigned_type_node, 9));
9183 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9185 static tree
9186 fold_builtin_fabs (location_t loc, tree arg, tree type)
9188 if (!validate_arg (arg, REAL_TYPE))
9189 return NULL_TREE;
9191 arg = fold_convert_loc (loc, type, arg);
9192 if (TREE_CODE (arg) == REAL_CST)
9193 return fold_abs_const (arg, type);
9194 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9197 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9199 static tree
9200 fold_builtin_abs (location_t loc, tree arg, tree type)
9202 if (!validate_arg (arg, INTEGER_TYPE))
9203 return NULL_TREE;
9205 arg = fold_convert_loc (loc, type, arg);
9206 if (TREE_CODE (arg) == INTEGER_CST)
9207 return fold_abs_const (arg, type);
9208 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9211 /* Fold a fma operation with arguments ARG[012]. */
9213 tree
9214 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9215 tree type, tree arg0, tree arg1, tree arg2)
9217 if (TREE_CODE (arg0) == REAL_CST
9218 && TREE_CODE (arg1) == REAL_CST
9219 && TREE_CODE (arg2) == REAL_CST)
9220 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9222 return NULL_TREE;
9225 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9227 static tree
9228 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9230 if (validate_arg (arg0, REAL_TYPE)
9231 && validate_arg(arg1, REAL_TYPE)
9232 && validate_arg(arg2, REAL_TYPE))
9234 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9235 if (tem)
9236 return tem;
9238 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9239 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9240 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9242 return NULL_TREE;
/* Fold a call to builtin fmin or fmax with arguments ARG0/ARG1 and
   result TYPE.  MAX selects fmax when true, fmin when false.
   Returns the folded tree, or NULL_TREE when no folding is done.  */

static tree
fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
                        tree type, bool max)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));

      if (res)
        return res;

      /* If either argument is NaN, return the other one.  Avoid the
         transformation if we get (and honor) a signalling NaN.  Using
         omit_one_operand() ensures we create a non-lvalue.  */
      if (TREE_CODE (arg0) == REAL_CST
          && real_isnan (&TREE_REAL_CST (arg0))
          && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              || ! TREE_REAL_CST (arg0).signalling))
        return omit_one_operand_loc (loc, type, arg1, arg0);
      if (TREE_CODE (arg1) == REAL_CST
          && real_isnan (&TREE_REAL_CST (arg1))
          && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
              || ! TREE_REAL_CST (arg1).signalling))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Transform fmin/fmax(x,x) -> x.  */
      if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR.  C99 requires these
         functions to return the numeric arg if the other one is NaN.
         These tree codes don't honor that, so only transform if
         -ffinite-math-only is set.  C99 doesn't require -0.0 to be
         handled, so we don't have to worry about it either.  */
      if (flag_finite_math_only)
        return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
                                fold_convert_loc (loc, type, arg0),
                                fold_convert_loc (loc, type, arg1));
    }
  return NULL_TREE;
}
9290 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9292 static tree
9293 fold_builtin_carg (location_t loc, tree arg, tree type)
9295 if (validate_arg (arg, COMPLEX_TYPE)
9296 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9298 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9300 if (atan2_fn)
9302 tree new_arg = builtin_save_expr (arg);
9303 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9304 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9305 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9309 return NULL_TREE;
/* Fold a call to builtin logb/ilogb with argument ARG.  RETTYPE is
   the result type: real for logb, integer for ilogb.  Returns the
   folded tree, or NULL_TREE when no folding is done.  */

static tree
fold_builtin_logb (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only literal, non-overflowed constants are folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
        {
        case rvc_nan:
        case rvc_inf:
          /* If arg is Inf or NaN and we're logb, return it.  */
          if (TREE_CODE (rettype) == REAL_TYPE)
            return fold_convert_loc (loc, rettype, arg);
          /* Fall through...  (ilogb of Inf/NaN is handled like zero:
             no folding.)  */
        case rvc_zero:
          /* Zero may set errno and/or raise an exception for logb, also
             for ilogb we don't know FP_ILOGB0.  */
          return NULL_TREE;
        case rvc_normal:
          /* For normal numbers, proceed iff radix == 2.  In GCC,
             normalized significands are in the range [0.5, 1.0).  We
             want the exponent as if they were [1.0, 2.0) so get the
             exponent and subtract 1.  */
          if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
            return fold_convert_loc (loc, rettype,
                                     build_int_cst (NULL_TREE,
                                                    REAL_EXP (value)-1));
          break;
        }
    }

  return NULL_TREE;
}
/* Fold a call to builtin significand, if radix == 2.  ARG is the
   argument and RETTYPE the (real) result type.  Returns the folded
   tree, or NULL_TREE when no folding is done.  */

static tree
fold_builtin_significand (location_t loc, tree arg, tree rettype)
{
  if (! validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg);

  /* Only literal, non-overflowed constants are folded.  */
  if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);

      switch (value->cl)
        {
        case rvc_zero:
        case rvc_nan:
        case rvc_inf:
          /* If arg is +-0, +-Inf or +-NaN, then return it.  */
          return fold_convert_loc (loc, rettype, arg);
        case rvc_normal:
          /* For normal numbers, proceed iff radix == 2.  */
          if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
            {
              REAL_VALUE_TYPE result = *value;
              /* In GCC, normalized significands are in the range [0.5,
                 1.0).  We want them to be [1.0, 2.0) so set the
                 exponent to 1.  */
              SET_REAL_EXP (&result, 1);
              return build_real (rettype, result);
            }
          break;
        }
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.
   ARG0 is the real argument, ARG1 the int* out-parameter for the
   exponent, RETTYPE the function's result type.  Returns the folded
   tree, or NULL_TREE when no folding is done.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only literal, non-overflowed constants are folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
        {
        case rvc_zero:
          /* For +-0, return (*exp = 0, +-0).  */
          exp = integer_zero_node;
          frac = arg0;
          break;
        case rvc_nan:
        case rvc_inf:
          /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
          return omit_one_operand_loc (loc, rettype, arg0, arg1);
        case rvc_normal:
          {
            /* Since the frexp function always expects base 2, and in
               GCC normalized significands are already in the range
               [0.5, 1.0), we have exactly what frexp wants.  */
            REAL_VALUE_TYPE frac_rvt = *value;
            SET_REAL_EXP (&frac_rvt, 0);
            frac = build_real (rettype, frac_rvt);
            exp = build_int_cst (NULL_TREE, REAL_EXP (value));
          }
          break;
        default:
          gcc_unreachable ();
        }

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
/* Fold a call to builtin ldexp or scalbn/scalbln.  If LDEXP is true
   then we can assume the base is two.  If it's false, then we have to
   check the mode of the TYPE parameter in certain cases.
   ARG0 is the real operand, ARG1 the integer exponent adjustment,
   TYPE the result type.  Returns NULL_TREE when no folding is done.  */

static tree
fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
                            tree type, bool ldexp)
{
  if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
    {
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0.  */
      if (real_zerop (arg0) || integer_zerop (arg1)
          || (TREE_CODE (arg0) == REAL_CST
              && !real_isfinite (&TREE_REAL_CST (arg0))))
        return omit_one_operand_loc (loc, type, arg0, arg1);

      /* If both arguments are constant, then try to evaluate it.
         For scalbn/scalbln this is only valid when the radix is 2.  */
      if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
          && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
          && host_integerp (arg1, 0))
        {
          /* Bound the maximum adjustment to twice the range of the
             mode's valid exponents.  Use abs to ensure the range is
             positive as a sanity check.  */
          const long max_exp_adj = 2 *
            labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
                  - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);

          /* Get the user-requested adjustment.  */
          const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);

          /* The requested adjustment must be inside this range.  This
             is a preliminary cap to avoid things like overflow, we
             may still fail to compute the result for other reasons.  */
          if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
            {
              REAL_VALUE_TYPE initial_result;

              real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);

              /* Ensure we didn't overflow.  */
              if (! real_isinf (&initial_result))
                {
                  const REAL_VALUE_TYPE trunc_result
                    = real_value_truncate (TYPE_MODE (type), initial_result);

                  /* Only proceed if the target mode can hold the
                     resulting value.  */
                  if (REAL_VALUES_EQUAL (initial_result, trunc_result))
                    return build_real (type, trunc_result);
                }
            }
        }
    }

  return NULL_TREE;
}
/* Fold a call to builtin modf.  ARG0 is the real argument, ARG1 the
   pointer out-parameter receiving the integral part, RETTYPE the
   function's result type.  Returns the folded tree, or NULL_TREE
   when no folding is done.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only literal, non-overflowed constants are folded.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
        {
        case rvc_nan:
        case rvc_zero:
          /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
          trunc = frac = *value;
          break;
        case rvc_inf:
          /* For +-Inf, return (*arg1 = arg0, +-0).  */
          frac = dconst0;
          frac.sign = value->sign;
          trunc = *value;
          break;
        case rvc_normal:
          /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
          real_trunc (&trunc, VOIDmode, value);
          real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
          /* If the original number was negative and already
             integral, then the fractional part is -0.0.  */
          if (value->sign && frac.cl == rvc_zero)
            frac.sign = value->sign;
          break;
        }

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
                              build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
                              build_real (rettype, frac));
    }

  return NULL_TREE;
}
9566 /* Given a location LOC, an interclass builtin function decl FNDECL
9567 and its single argument ARG, return an folded expression computing
9568 the same, or NULL_TREE if we either couldn't or didn't want to fold
9569 (the latter happen if there's an RTL instruction available). */
9571 static tree
9572 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9574 enum machine_mode mode;
9576 if (!validate_arg (arg, REAL_TYPE))
9577 return NULL_TREE;
9579 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9580 return NULL_TREE;
9582 mode = TYPE_MODE (TREE_TYPE (arg));
9584 /* If there is no optab, try generic code. */
9585 switch (DECL_FUNCTION_CODE (fndecl))
9587 tree result;
9589 CASE_FLT_FN (BUILT_IN_ISINF):
9591 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9592 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9593 tree const type = TREE_TYPE (arg);
9594 REAL_VALUE_TYPE r;
9595 char buf[128];
9597 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9598 real_from_string (&r, buf);
9599 result = build_call_expr (isgr_fn, 2,
9600 fold_build1_loc (loc, ABS_EXPR, type, arg),
9601 build_real (type, r));
9602 return result;
9604 CASE_FLT_FN (BUILT_IN_FINITE):
9605 case BUILT_IN_ISFINITE:
9607 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9608 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9609 tree const type = TREE_TYPE (arg);
9610 REAL_VALUE_TYPE r;
9611 char buf[128];
9613 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9614 real_from_string (&r, buf);
9615 result = build_call_expr (isle_fn, 2,
9616 fold_build1_loc (loc, ABS_EXPR, type, arg),
9617 build_real (type, r));
9618 /*result = fold_build2_loc (loc, UNGT_EXPR,
9619 TREE_TYPE (TREE_TYPE (fndecl)),
9620 fold_build1_loc (loc, ABS_EXPR, type, arg),
9621 build_real (type, r));
9622 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9623 TREE_TYPE (TREE_TYPE (fndecl)),
9624 result);*/
9625 return result;
9627 case BUILT_IN_ISNORMAL:
9629 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9630 islessequal(fabs(x),DBL_MAX). */
9631 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9632 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9633 tree const type = TREE_TYPE (arg);
9634 REAL_VALUE_TYPE rmax, rmin;
9635 char buf[128];
9637 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9638 real_from_string (&rmax, buf);
9639 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9640 real_from_string (&rmin, buf);
9641 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9642 result = build_call_expr (isle_fn, 2, arg,
9643 build_real (type, rmax));
9644 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9645 build_call_expr (isge_fn, 2, arg,
9646 build_real (type, rmin)));
9647 return result;
9649 default:
9650 break;
9653 return NULL_TREE;
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  BUILTIN_INDEX selects which
   classification to fold.  Returns the folded tree, or NULL_TREE
   when no folding is done.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  REAL_VALUE_TYPE r;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities in the mode, the answer is always 0.  */
      if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          if (real_isinf (&r))
            return real_compare (GT_EXPR, &r, &dconst0)
                   ? integer_one_node : integer_minus_one_node;
          else
            return integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
        /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
        /* In a boolean context, GCC will fold the inner COND_EXPR to
           1.  So e.g. "if (isinf_sign(x))" would be folded to just
           "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
        tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
        tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
        tree tmp = NULL_TREE;

        arg = builtin_save_expr (arg);

        if (signbit_fn && isinf_fn)
          {
            tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
            tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

            signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                            signbit_call, integer_zero_node);
            isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
                                          isinf_call, integer_zero_node);

            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
                                   integer_minus_one_node, integer_one_node);
            tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                                   isinf_call, tmp,
                                   integer_zero_node);
          }

        /* NULL_TREE if either helper decl was unavailable.  */
        return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities, everything is finite.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
          && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand_loc (loc, type, integer_one_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isfinite (&r) ? integer_one_node : integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs in the mode, the answer is always 0.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isnan (&r) ? integer_one_node : integer_zero_node;
        }

      /* isnan(x) is equivalent to x unordered with itself; save the
         argument since it is used twice.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree exp)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  enum machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE,
                         INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  fp_nan = CALL_EXPR_ARG (exp, 0);
  fp_infinite = CALL_EXPR_ARG (exp, 1);
  fp_normal = CALL_EXPR_ARG (exp, 2);
  fp_subnormal = CALL_EXPR_ARG (exp, 3);
  fp_zero = CALL_EXPR_ARG (exp, 4);
  arg = CALL_EXPR_ARG (exp, 5);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Save fabs(arg) so it is only evaluated once in the chain below.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
         (fabs(x) == Inf ? FP_INFINITE :
           (fabs(x) >= DBL_MIN ? FP_NORMAL :
             (x == 0 ? FP_ZERO : FP_SUBNORMAL))).
     Built innermost-first, so RES accumulates the chain.  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                         build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                         tmp, fp_zero, fp_subnormal);

  /* Smallest normal value of MODE: 2**(emin-1).  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
                         arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                             build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                             fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      /* ORDERED_EXPR selects the non-NaN chain; its false branch is
         the FP_NAN result.  */
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  /* Convert both operands to the common comparison type.  */
  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      /* __builtin_isunordered itself: trivially 0 when the mode has
         no NaNs.  */
      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
        return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  /* The supplied codes compute the opposite of the desired result, so
     negate the chosen comparison.  */
  code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
                                                   : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                          fold_build2_loc (loc, code, type, arg0, arg1));
}
9867 /* Fold a call to built-in function FNDECL with 0 arguments.
9868 IGNORE is true if the result of the function call is ignored. This
9869 function returns NULL_TREE if no simplification was possible. */
9871 static tree
9872 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9874 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9875 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9876 switch (fcode)
9878 CASE_FLT_FN (BUILT_IN_INF):
9879 case BUILT_IN_INFD32:
9880 case BUILT_IN_INFD64:
9881 case BUILT_IN_INFD128:
9882 return fold_builtin_inf (loc, type, true);
9884 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9885 return fold_builtin_inf (loc, type, false);
9887 case BUILT_IN_CLASSIFY_TYPE:
9888 return fold_builtin_classify_type (NULL_TREE);
9890 default:
9891 break;
9893 return NULL_TREE;
9896 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9897 IGNORE is true if the result of the function call is ignored. This
9898 function returns NULL_TREE if no simplification was possible. */
9900 static tree
9901 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9903 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9904 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9905 switch (fcode)
9907 case BUILT_IN_CONSTANT_P:
9909 tree val = fold_builtin_constant_p (arg0);
9911 /* Gimplification will pull the CALL_EXPR for the builtin out of
9912 an if condition. When not optimizing, we'll not CSE it back.
9913 To avoid link error types of regressions, return false now. */
9914 if (!val && !optimize)
9915 val = integer_zero_node;
9917 return val;
9920 case BUILT_IN_CLASSIFY_TYPE:
9921 return fold_builtin_classify_type (arg0);
9923 case BUILT_IN_STRLEN:
9924 return fold_builtin_strlen (loc, type, arg0);
9926 CASE_FLT_FN (BUILT_IN_FABS):
9927 return fold_builtin_fabs (loc, arg0, type);
9929 case BUILT_IN_ABS:
9930 case BUILT_IN_LABS:
9931 case BUILT_IN_LLABS:
9932 case BUILT_IN_IMAXABS:
9933 return fold_builtin_abs (loc, arg0, type);
9935 CASE_FLT_FN (BUILT_IN_CONJ):
9936 if (validate_arg (arg0, COMPLEX_TYPE)
9937 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9938 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9939 break;
9941 CASE_FLT_FN (BUILT_IN_CREAL):
9942 if (validate_arg (arg0, COMPLEX_TYPE)
9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9944 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9945 break;
9947 CASE_FLT_FN (BUILT_IN_CIMAG):
9948 if (validate_arg (arg0, COMPLEX_TYPE)
9949 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9950 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9951 break;
9953 CASE_FLT_FN (BUILT_IN_CCOS):
9954 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9956 CASE_FLT_FN (BUILT_IN_CCOSH):
9957 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9959 CASE_FLT_FN (BUILT_IN_CPROJ):
9960 return fold_builtin_cproj(loc, arg0, type);
9962 CASE_FLT_FN (BUILT_IN_CSIN):
9963 if (validate_arg (arg0, COMPLEX_TYPE)
9964 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9965 return do_mpc_arg1 (arg0, type, mpc_sin);
9966 break;
9968 CASE_FLT_FN (BUILT_IN_CSINH):
9969 if (validate_arg (arg0, COMPLEX_TYPE)
9970 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9971 return do_mpc_arg1 (arg0, type, mpc_sinh);
9972 break;
9974 CASE_FLT_FN (BUILT_IN_CTAN):
9975 if (validate_arg (arg0, COMPLEX_TYPE)
9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9977 return do_mpc_arg1 (arg0, type, mpc_tan);
9978 break;
9980 CASE_FLT_FN (BUILT_IN_CTANH):
9981 if (validate_arg (arg0, COMPLEX_TYPE)
9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9983 return do_mpc_arg1 (arg0, type, mpc_tanh);
9984 break;
9986 CASE_FLT_FN (BUILT_IN_CLOG):
9987 if (validate_arg (arg0, COMPLEX_TYPE)
9988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9989 return do_mpc_arg1 (arg0, type, mpc_log);
9990 break;
9992 CASE_FLT_FN (BUILT_IN_CSQRT):
9993 if (validate_arg (arg0, COMPLEX_TYPE)
9994 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9995 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9996 break;
9998 CASE_FLT_FN (BUILT_IN_CASIN):
9999 if (validate_arg (arg0, COMPLEX_TYPE)
10000 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10001 return do_mpc_arg1 (arg0, type, mpc_asin);
10002 break;
10004 CASE_FLT_FN (BUILT_IN_CACOS):
10005 if (validate_arg (arg0, COMPLEX_TYPE)
10006 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10007 return do_mpc_arg1 (arg0, type, mpc_acos);
10008 break;
10010 CASE_FLT_FN (BUILT_IN_CATAN):
10011 if (validate_arg (arg0, COMPLEX_TYPE)
10012 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10013 return do_mpc_arg1 (arg0, type, mpc_atan);
10014 break;
10016 CASE_FLT_FN (BUILT_IN_CASINH):
10017 if (validate_arg (arg0, COMPLEX_TYPE)
10018 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10019 return do_mpc_arg1 (arg0, type, mpc_asinh);
10020 break;
10022 CASE_FLT_FN (BUILT_IN_CACOSH):
10023 if (validate_arg (arg0, COMPLEX_TYPE)
10024 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10025 return do_mpc_arg1 (arg0, type, mpc_acosh);
10026 break;
10028 CASE_FLT_FN (BUILT_IN_CATANH):
10029 if (validate_arg (arg0, COMPLEX_TYPE)
10030 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10031 return do_mpc_arg1 (arg0, type, mpc_atanh);
10032 break;
10034 CASE_FLT_FN (BUILT_IN_CABS):
10035 return fold_builtin_cabs (loc, arg0, type, fndecl);
10037 CASE_FLT_FN (BUILT_IN_CARG):
10038 return fold_builtin_carg (loc, arg0, type);
10040 CASE_FLT_FN (BUILT_IN_SQRT):
10041 return fold_builtin_sqrt (loc, arg0, type);
10043 CASE_FLT_FN (BUILT_IN_CBRT):
10044 return fold_builtin_cbrt (loc, arg0, type);
10046 CASE_FLT_FN (BUILT_IN_ASIN):
10047 if (validate_arg (arg0, REAL_TYPE))
10048 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10049 &dconstm1, &dconst1, true);
10050 break;
10052 CASE_FLT_FN (BUILT_IN_ACOS):
10053 if (validate_arg (arg0, REAL_TYPE))
10054 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10055 &dconstm1, &dconst1, true);
10056 break;
10058 CASE_FLT_FN (BUILT_IN_ATAN):
10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10061 break;
10063 CASE_FLT_FN (BUILT_IN_ASINH):
10064 if (validate_arg (arg0, REAL_TYPE))
10065 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10066 break;
10068 CASE_FLT_FN (BUILT_IN_ACOSH):
10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10071 &dconst1, NULL, true);
10072 break;
10074 CASE_FLT_FN (BUILT_IN_ATANH):
10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10077 &dconstm1, &dconst1, false);
10078 break;
10080 CASE_FLT_FN (BUILT_IN_SIN):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10083 break;
10085 CASE_FLT_FN (BUILT_IN_COS):
10086 return fold_builtin_cos (loc, arg0, type, fndecl);
10088 CASE_FLT_FN (BUILT_IN_TAN):
10089 return fold_builtin_tan (arg0, type);
10091 CASE_FLT_FN (BUILT_IN_CEXP):
10092 return fold_builtin_cexp (loc, arg0, type);
10094 CASE_FLT_FN (BUILT_IN_CEXPI):
10095 if (validate_arg (arg0, REAL_TYPE))
10096 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10097 break;
10099 CASE_FLT_FN (BUILT_IN_SINH):
10100 if (validate_arg (arg0, REAL_TYPE))
10101 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10102 break;
10104 CASE_FLT_FN (BUILT_IN_COSH):
10105 return fold_builtin_cosh (loc, arg0, type, fndecl);
10107 CASE_FLT_FN (BUILT_IN_TANH):
10108 if (validate_arg (arg0, REAL_TYPE))
10109 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10110 break;
10112 CASE_FLT_FN (BUILT_IN_ERF):
10113 if (validate_arg (arg0, REAL_TYPE))
10114 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10115 break;
10117 CASE_FLT_FN (BUILT_IN_ERFC):
10118 if (validate_arg (arg0, REAL_TYPE))
10119 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10120 break;
10122 CASE_FLT_FN (BUILT_IN_TGAMMA):
10123 if (validate_arg (arg0, REAL_TYPE))
10124 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10125 break;
10127 CASE_FLT_FN (BUILT_IN_EXP):
10128 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10130 CASE_FLT_FN (BUILT_IN_EXP2):
10131 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10133 CASE_FLT_FN (BUILT_IN_EXP10):
10134 CASE_FLT_FN (BUILT_IN_POW10):
10135 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10137 CASE_FLT_FN (BUILT_IN_EXPM1):
10138 if (validate_arg (arg0, REAL_TYPE))
10139 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10140 break;
10142 CASE_FLT_FN (BUILT_IN_LOG):
10143 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10145 CASE_FLT_FN (BUILT_IN_LOG2):
10146 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10148 CASE_FLT_FN (BUILT_IN_LOG10):
10149 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10151 CASE_FLT_FN (BUILT_IN_LOG1P):
10152 if (validate_arg (arg0, REAL_TYPE))
10153 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10154 &dconstm1, NULL, false);
10155 break;
10157 CASE_FLT_FN (BUILT_IN_J0):
10158 if (validate_arg (arg0, REAL_TYPE))
10159 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10160 NULL, NULL, 0);
10161 break;
10163 CASE_FLT_FN (BUILT_IN_J1):
10164 if (validate_arg (arg0, REAL_TYPE))
10165 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10166 NULL, NULL, 0);
10167 break;
10169 CASE_FLT_FN (BUILT_IN_Y0):
10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10172 &dconst0, NULL, false);
10173 break;
10175 CASE_FLT_FN (BUILT_IN_Y1):
10176 if (validate_arg (arg0, REAL_TYPE))
10177 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10178 &dconst0, NULL, false);
10179 break;
10181 CASE_FLT_FN (BUILT_IN_NAN):
10182 case BUILT_IN_NAND32:
10183 case BUILT_IN_NAND64:
10184 case BUILT_IN_NAND128:
10185 return fold_builtin_nan (arg0, type, true);
10187 CASE_FLT_FN (BUILT_IN_NANS):
10188 return fold_builtin_nan (arg0, type, false);
10190 CASE_FLT_FN (BUILT_IN_FLOOR):
10191 return fold_builtin_floor (loc, fndecl, arg0);
10193 CASE_FLT_FN (BUILT_IN_CEIL):
10194 return fold_builtin_ceil (loc, fndecl, arg0);
10196 CASE_FLT_FN (BUILT_IN_TRUNC):
10197 return fold_builtin_trunc (loc, fndecl, arg0);
10199 CASE_FLT_FN (BUILT_IN_ROUND):
10200 return fold_builtin_round (loc, fndecl, arg0);
10202 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10203 CASE_FLT_FN (BUILT_IN_RINT):
10204 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10206 CASE_FLT_FN (BUILT_IN_LCEIL):
10207 CASE_FLT_FN (BUILT_IN_LLCEIL):
10208 CASE_FLT_FN (BUILT_IN_LFLOOR):
10209 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10210 CASE_FLT_FN (BUILT_IN_LROUND):
10211 CASE_FLT_FN (BUILT_IN_LLROUND):
10212 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10214 CASE_FLT_FN (BUILT_IN_LRINT):
10215 CASE_FLT_FN (BUILT_IN_LLRINT):
10216 return fold_fixed_mathfn (loc, fndecl, arg0);
10218 case BUILT_IN_BSWAP32:
10219 case BUILT_IN_BSWAP64:
10220 return fold_builtin_bswap (fndecl, arg0);
10222 CASE_INT_FN (BUILT_IN_FFS):
10223 CASE_INT_FN (BUILT_IN_CLZ):
10224 CASE_INT_FN (BUILT_IN_CTZ):
10225 CASE_INT_FN (BUILT_IN_POPCOUNT):
10226 CASE_INT_FN (BUILT_IN_PARITY):
10227 return fold_builtin_bitop (fndecl, arg0);
10229 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10230 return fold_builtin_signbit (loc, arg0, type);
10232 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10233 return fold_builtin_significand (loc, arg0, type);
10235 CASE_FLT_FN (BUILT_IN_ILOGB):
10236 CASE_FLT_FN (BUILT_IN_LOGB):
10237 return fold_builtin_logb (loc, arg0, type);
10239 case BUILT_IN_ISASCII:
10240 return fold_builtin_isascii (loc, arg0);
10242 case BUILT_IN_TOASCII:
10243 return fold_builtin_toascii (loc, arg0);
10245 case BUILT_IN_ISDIGIT:
10246 return fold_builtin_isdigit (loc, arg0);
10248 CASE_FLT_FN (BUILT_IN_FINITE):
10249 case BUILT_IN_FINITED32:
10250 case BUILT_IN_FINITED64:
10251 case BUILT_IN_FINITED128:
10252 case BUILT_IN_ISFINITE:
10254 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10255 if (ret)
10256 return ret;
10257 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10260 CASE_FLT_FN (BUILT_IN_ISINF):
10261 case BUILT_IN_ISINFD32:
10262 case BUILT_IN_ISINFD64:
10263 case BUILT_IN_ISINFD128:
10265 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10266 if (ret)
10267 return ret;
10268 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10271 case BUILT_IN_ISNORMAL:
10272 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10274 case BUILT_IN_ISINF_SIGN:
10275 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10277 CASE_FLT_FN (BUILT_IN_ISNAN):
10278 case BUILT_IN_ISNAND32:
10279 case BUILT_IN_ISNAND64:
10280 case BUILT_IN_ISNAND128:
10281 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10283 case BUILT_IN_PRINTF:
10284 case BUILT_IN_PRINTF_UNLOCKED:
10285 case BUILT_IN_VPRINTF:
10286 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10288 case BUILT_IN_FREE:
10289 if (integer_zerop (arg0))
10290 return build_empty_stmt (loc);
10291 break;
10293 default:
10294 break;
10297 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  /* TYPE is the return type of the call being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* Bessel functions of integer order: constant-fold via MPFR when the
       arguments have the expected types.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
      break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
      break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
      break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
      break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
      break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
      break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* When the result is unused, stpcpy can be lowered to the cheaper
	 strcpy; otherwise try the stpcpy-specific folding.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* The unordered comparison builtins: each maps to an UN* comparison
       code plus the ordinary comparison used when neither operand can
       be a NaN.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* For the _chk variants ARG0 is the object-size checking flag;
	 it must be a side-effect-free integer before we can drop it.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
      break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10502 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10503 and ARG2. IGNORE is true if the result of the function call is ignored.
10504 This function returns NULL_TREE if no simplification was possible. */
10506 static tree
10507 fold_builtin_3 (location_t loc, tree fndecl,
10508 tree arg0, tree arg1, tree arg2, bool ignore)
10510 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10511 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10512 switch (fcode)
10515 CASE_FLT_FN (BUILT_IN_SINCOS):
10516 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10518 CASE_FLT_FN (BUILT_IN_FMA):
10519 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10520 break;
10522 CASE_FLT_FN (BUILT_IN_REMQUO):
10523 if (validate_arg (arg0, REAL_TYPE)
10524 && validate_arg(arg1, REAL_TYPE)
10525 && validate_arg(arg2, POINTER_TYPE))
10526 return do_mpfr_remquo (arg0, arg1, arg2);
10527 break;
10529 case BUILT_IN_MEMSET:
10530 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10532 case BUILT_IN_BCOPY:
10533 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10534 void_type_node, true, /*endp=*/3);
10536 case BUILT_IN_MEMCPY:
10537 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10538 type, ignore, /*endp=*/0);
10540 case BUILT_IN_MEMPCPY:
10541 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10542 type, ignore, /*endp=*/1);
10544 case BUILT_IN_MEMMOVE:
10545 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10546 type, ignore, /*endp=*/3);
10548 case BUILT_IN_STRNCAT:
10549 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10551 case BUILT_IN_STRNCPY:
10552 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10554 case BUILT_IN_STRNCMP:
10555 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10557 case BUILT_IN_MEMCHR:
10558 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10560 case BUILT_IN_BCMP:
10561 case BUILT_IN_MEMCMP:
10562 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10564 case BUILT_IN_SPRINTF:
10565 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10567 case BUILT_IN_SNPRINTF:
10568 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10570 case BUILT_IN_STRCPY_CHK:
10571 case BUILT_IN_STPCPY_CHK:
10572 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10573 ignore, fcode);
10575 case BUILT_IN_STRCAT_CHK:
10576 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10578 case BUILT_IN_PRINTF_CHK:
10579 case BUILT_IN_VPRINTF_CHK:
10580 if (!validate_arg (arg0, INTEGER_TYPE)
10581 || TREE_SIDE_EFFECTS (arg0))
10582 return NULL_TREE;
10583 else
10584 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10585 break;
10587 case BUILT_IN_FPRINTF:
10588 case BUILT_IN_FPRINTF_UNLOCKED:
10589 case BUILT_IN_VFPRINTF:
10590 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10591 ignore, fcode);
10593 case BUILT_IN_FPRINTF_CHK:
10594 case BUILT_IN_VFPRINTF_CHK:
10595 if (!validate_arg (arg1, INTEGER_TYPE)
10596 || TREE_SIDE_EFFECTS (arg1))
10597 return NULL_TREE;
10598 else
10599 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10600 ignore, fcode);
10602 default:
10603 break;
10605 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_SNPRINTF:
      return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* ARG1 is the object-size checking flag; it must be a
	 side-effect-free integer before we can drop it.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments, where NARGS <= 4.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  Note that this only folds builtins with
   fixed argument patterns.  Foldings that do varargs-to-varargs
   transformations, or that match calls with more than 4 arguments,
   need to be handled with fold_builtin_varargs instead.  */

#define MAX_ARGS_TO_FOLD_BUILTIN 4

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
{
  tree ret = NULL_TREE;

  /* Dispatch on arity to the fixed-argument folders.  */
  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl, ignore);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
      break;
    case 4:
      ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
 			    ignore);
      break;
    default:
      break;
    }
  if (ret)
    {
      /* Wrap the result in a NOP_EXPR and suppress warnings: the call
	 node may be removed earlier than any warning would be issued.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Same wrapping as fold_builtin_n: keep location and suppress
	 spurious warnings caused by removing the call node early.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10743 /* Return true if FNDECL shouldn't be folded right now.
10744 If a built-in function has an inline attribute always_inline
10745 wrapper, defer folding it after always_inline functions have
10746 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10747 might not be performed. */
10749 static bool
10750 avoid_folding_inline_builtin (tree fndecl)
10752 return (DECL_DECLARED_INLINE_P (fndecl)
10753 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10754 && cfun
10755 && !cfun->always_inline_functions_inlined
10756 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* always_inline wrappers must be inlined before folding, see
	 avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded by the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  /* Fall back to the varargs folders when the fixed-arity
	     folding produced nothing.  */
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10813 /* Conveniently construct a function call expression. FNDECL names the
10814 function to be called and N arguments are passed in the array
10815 ARGARRAY. */
10817 tree
10818 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10820 tree fntype = TREE_TYPE (fndecl);
10821 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10823 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10826 /* Conveniently construct a function call expression. FNDECL names the
10827 function to be called and the arguments are passed in the vector
10828 VEC. */
10830 tree
10831 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10833 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10834 VEC_address (tree, vec));
10838 /* Conveniently construct a function call expression. FNDECL names the
10839 function to be called, N is the number of arguments, and the "..."
10840 parameters are the argument expressions. */
10842 tree
10843 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10845 va_list ap;
10846 tree *argarray = XALLOCAVEC (tree, n);
10847 int i;
10849 va_start (ap, n);
10850 for (i = 0; i < n; i++)
10851 argarray[i] = va_arg (ap, tree);
10852 va_end (ap);
10853 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10856 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10857 varargs macros aren't supported by all bootstrap compilers. */
10859 tree
10860 build_call_expr (tree fndecl, int n, ...)
10862 va_list ap;
10863 tree *argarray = XALLOCAVEC (tree, n);
10864 int i;
10866 va_start (ap, n);
10867 for (i = 0; i < n; i++)
10868 argarray[i] = va_arg (ap, tree);
10869 va_end (ap);
10870 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* always_inline wrappers must be inlined before folding.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      /* The new arguments come first, followed by the retained tail of
	 the old arguments; I continues past the first loop on purpose.  */
      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    /* Nothing new to add: reuse the old array past the skipped prefix.  */
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
10959 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10960 list ARGS along with N new arguments specified as the "..."
10961 parameters. SKIP is the number of arguments in ARGS to be omitted.
10962 OLDNARGS is the number of elements in ARGS. */
10964 static tree
10965 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10966 int skip, tree fndecl, int n, ...)
10968 va_list ap;
10969 tree t;
10971 va_start (ap, n);
10972 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10973 va_end (ap);
10975 return t;
10978 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10979 along with N new arguments specified as the "..." parameters. SKIP
10980 is the number of arguments in EXP to be omitted. This function is used
10981 to do varargs-to-varargs transformations. */
10983 static tree
10984 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10986 va_list ap;
10987 tree t;
10989 va_start (ap, n);
10990 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10991 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10992 va_end (ap);
10994 return t;
10997 /* Validate a single argument ARG against a tree code CODE representing
10998 a type. */
11000 static bool
11001 validate_arg (const_tree arg, enum tree_code code)
11003 if (!arg)
11004 return false;
11005 else if (code == POINTER_TYPE)
11006 return POINTER_TYPE_P (TREE_TYPE (arg));
11007 else if (code == INTEGER_TYPE)
11008 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11009 return code == TREE_CODE (TREE_TYPE (arg));
11012 /* This function validates the types of a function call argument list
11013 against a specified list of tree_codes. If the last specifier is a 0,
11014 that represents an ellipses, otherwise the last specifier must be a
11015 VOID_TYPE.
11017 This is the GIMPLE version of validate_arglist. Eventually we want to
11018 completely convert builtins.c to work from GIMPLEs and the tree based
11019 validate_arglist will then be removed. */
11021 bool
11022 validate_gimple_arglist (const_gimple call, ...)
11024 enum tree_code code;
11025 bool res = 0;
11026 va_list ap;
11027 const_tree arg;
11028 size_t i;
11030 va_start (ap, call);
11031 i = 0;
11035 code = (enum tree_code) va_arg (ap, int);
11036 switch (code)
11038 case 0:
11039 /* This signifies an ellipses, any further arguments are all ok. */
11040 res = true;
11041 goto end;
11042 case VOID_TYPE:
11043 /* This signifies an endlink, if no arguments remain, return
11044 true, otherwise return false. */
11045 res = (i == gimple_call_num_args (call));
11046 goto end;
11047 default:
11048 /* If no parameters remain or the parameter's code does not
11049 match the specified code, return false. Otherwise continue
11050 checking any remaining arguments. */
11051 arg = gimple_call_arg (call, i++);
11052 if (!validate_arg (arg, code))
11053 goto end;
11054 break;
11057 while (1);
11059 /* We need gotos here since we can only have one VA_CLOSE in a
11060 function. */
11061 end: ;
11062 va_end (ap);
11064 return res;
11067 /* This function validates the types of a function call argument list
11068 against a specified list of tree_codes. If the last specifier is a 0,
11069 that represents an ellipses, otherwise the last specifier must be a
11070 VOID_TYPE. */
11072 bool
11073 validate_arglist (const_tree callexpr, ...)
11075 enum tree_code code;
11076 bool res = 0;
11077 va_list ap;
11078 const_call_expr_arg_iterator iter;
11079 const_tree arg;
11081 va_start (ap, callexpr);
11082 init_const_call_expr_arg_iterator (callexpr, &iter);
11086 code = (enum tree_code) va_arg (ap, int);
11087 switch (code)
11089 case 0:
11090 /* This signifies an ellipses, any further arguments are all ok. */
11091 res = true;
11092 goto end;
11093 case VOID_TYPE:
11094 /* This signifies an endlink, if no arguments remain, return
11095 true, otherwise return false. */
11096 res = !more_const_call_expr_args_p (&iter);
11097 goto end;
11098 default:
11099 /* If no parameters remain or the parameter's code does not
11100 match the specified code, return false. Otherwise continue
11101 checking any remaining arguments. */
11102 arg = next_const_call_expr_arg (&iter);
11103 if (!validate_arg (arg, code))
11104 goto end;
11105 break;
11108 while (1);
11110 /* We need gotos here since we can only have one VA_CLOSE in a
11111 function. */
11112 end: ;
11113 va_end (ap);
11115 return res;
11118 /* Default target-specific builtin expander that does nothing. */
11121 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11122 rtx target ATTRIBUTE_UNUSED,
11123 rtx subtarget ATTRIBUTE_UNUSED,
11124 enum machine_mode mode ATTRIBUTE_UNUSED,
11125 int ignore ATTRIBUTE_UNUSED)
11127 return NULL_RTX;
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  /* Only addresses of objects can refer to readonly data.  */
  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      /* Folding requires the needle to be a constant string.  */
      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  /* Both strings are constant: compute the result at compile
	     time with the host strstr.  */
	  const char *r = strstr (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      /* The argument is const char *, and the result is char *, so we need
	 a type conversion here to avoid a warning.  */
      if (p2[0] == '\0')
	return fold_convert_loc (loc, type, s1);

      /* Only a single-character needle can become strchr.  */
      if (p2[1] != '\0')
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strstr(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (NULL_TREE, p2[0]));
    }
}
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The character to search for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the tree character constant to a host char,
	     bailing out if it does not fit.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character to search for must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the tree character constant to a host char,
	     bailing out if it does not fit.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
11342 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11343 to the call, and TYPE is its return type.
11345 Return NULL_TREE if no simplification was possible, otherwise return the
11346 simplified form of the call as a tree.
11348 The simplified form may be a constant or other expression which
11349 computes the same value, but in a more efficient manner (including
11350 calls to other builtin functions).
11352 The call may contain arguments which need to be evaluated, but
11353 which are not useful to determine the result of the call. In
11354 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11355 COMPOUND_EXPR will be an argument which must be evaluated.
11356 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11357 COMPOUND_EXPR in the chain will contain the tree for the simplified
11358 form of the builtin function call. */
11360 static tree
11361 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11363 if (!validate_arg (s1, POINTER_TYPE)
11364 || !validate_arg (s2, POINTER_TYPE))
11365 return NULL_TREE;
11366 else
11368 tree fn;
11369 const char *p1, *p2;
11371 p2 = c_getstr (s2);
11372 if (p2 == NULL)
11373 return NULL_TREE;
11375 p1 = c_getstr (s1);
11376 if (p1 != NULL)
11378 const char *r = strpbrk (p1, p2);
11379 tree tem;
11381 if (r == NULL)
11382 return build_int_cst (TREE_TYPE (s1), 0);
11384 /* Return an offset into the constant string argument. */
11385 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11386 s1, size_int (r - p1));
11387 return fold_convert_loc (loc, type, tem);
11390 if (p2[0] == '\0')
11391 /* strpbrk(x, "") == NULL.
11392 Evaluate and ignore s1 in case it had side-effects. */
11393 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11395 if (p2[1] != '\0')
11396 return NULL_TREE; /* Really call strpbrk. */
11398 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11399 if (!fn)
11400 return NULL_TREE;
11402 /* New argument list transforming strpbrk(s1, s2) to
11403 strchr(s1, s2[0]). */
11404 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11408 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11409 to the call.
11411 Return NULL_TREE if no simplification was possible, otherwise return the
11412 simplified form of the call as a tree.
11414 The simplified form may be a constant or other expression which
11415 computes the same value, but in a more efficient manner (including
11416 calls to other builtin functions).
11418 The call may contain arguments which need to be evaluated, but
11419 which are not useful to determine the result of the call. In
11420 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11421 COMPOUND_EXPR will be an argument which must be evaluated.
11422 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11423 COMPOUND_EXPR in the chain will contain the tree for the simplified
11424 form of the builtin function call. */
11426 static tree
11427 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11429 if (!validate_arg (dst, POINTER_TYPE)
11430 || !validate_arg (src, POINTER_TYPE))
11431 return NULL_TREE;
11432 else
11434 const char *p = c_getstr (src);
11436 /* If the string length is zero, return the dst parameter. */
11437 if (p && *p == '\0')
11438 return dst;
11440 if (optimize_insn_for_speed_p ())
11442 /* See if we can store by pieces into (dst + strlen(dst)). */
11443 tree newdst, call;
11444 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11445 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11447 if (!strlen_fn || !strcpy_fn)
11448 return NULL_TREE;
11450 /* If we don't have a movstr we don't want to emit an strcpy
11451 call. We have to do that if the length of the source string
11452 isn't computable (in that case we can use memcpy probably
11453 later expanding to a sequence of mov instructions). If we
11454 have movstr instructions we can emit strcpy calls. */
11455 if (!HAVE_movstr)
11457 tree len = c_strlen (src, 1);
11458 if (! len || TREE_SIDE_EFFECTS (len))
11459 return NULL_TREE;
11462 /* Stabilize the argument list. */
11463 dst = builtin_save_expr (dst);
11465 /* Create strlen (dst). */
11466 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11467 /* Create (dst p+ strlen (dst)). */
11469 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11470 TREE_TYPE (dst), dst, newdst);
11471 newdst = builtin_save_expr (newdst);
11473 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11474 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11476 return NULL_TREE;
11480 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11481 arguments to the call.
11483 Return NULL_TREE if no simplification was possible, otherwise return the
11484 simplified form of the call as a tree.
11486 The simplified form may be a constant or other expression which
11487 computes the same value, but in a more efficient manner (including
11488 calls to other builtin functions).
11490 The call may contain arguments which need to be evaluated, but
11491 which are not useful to determine the result of the call. In
11492 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11493 COMPOUND_EXPR will be an argument which must be evaluated.
11494 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11495 COMPOUND_EXPR in the chain will contain the tree for the simplified
11496 form of the builtin function call. */
11498 static tree
11499 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11501 if (!validate_arg (dst, POINTER_TYPE)
11502 || !validate_arg (src, POINTER_TYPE)
11503 || !validate_arg (len, INTEGER_TYPE))
11504 return NULL_TREE;
11505 else
11507 const char *p = c_getstr (src);
11509 /* If the requested length is zero, or the src parameter string
11510 length is zero, return the dst parameter. */
11511 if (integer_zerop (len) || (p && *p == '\0'))
11512 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11514 /* If the requested len is greater than or equal to the string
11515 length, call strcat. */
11516 if (TREE_CODE (len) == INTEGER_CST && p
11517 && compare_tree_int (len, strlen (p)) >= 0)
11519 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11521 /* If the replacement _DECL isn't initialized, don't do the
11522 transformation. */
11523 if (!fn)
11524 return NULL_TREE;
11526 return build_call_expr_loc (loc, fn, 2, dst, src);
11528 return NULL_TREE;
11532 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11533 to the call.
11535 Return NULL_TREE if no simplification was possible, otherwise return the
11536 simplified form of the call as a tree.
11538 The simplified form may be a constant or other expression which
11539 computes the same value, but in a more efficient manner (including
11540 calls to other builtin functions).
11542 The call may contain arguments which need to be evaluated, but
11543 which are not useful to determine the result of the call. In
11544 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11545 COMPOUND_EXPR will be an argument which must be evaluated.
11546 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11547 COMPOUND_EXPR in the chain will contain the tree for the simplified
11548 form of the builtin function call. */
11550 static tree
11551 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11553 if (!validate_arg (s1, POINTER_TYPE)
11554 || !validate_arg (s2, POINTER_TYPE))
11555 return NULL_TREE;
11556 else
11558 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11560 /* If both arguments are constants, evaluate at compile-time. */
11561 if (p1 && p2)
11563 const size_t r = strspn (p1, p2);
11564 return size_int (r);
11567 /* If either argument is "", return NULL_TREE. */
11568 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11569 /* Evaluate and ignore both arguments in case either one has
11570 side-effects. */
11571 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11572 s1, s2);
11573 return NULL_TREE;
11577 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11578 to the call.
11580 Return NULL_TREE if no simplification was possible, otherwise return the
11581 simplified form of the call as a tree.
11583 The simplified form may be a constant or other expression which
11584 computes the same value, but in a more efficient manner (including
11585 calls to other builtin functions).
11587 The call may contain arguments which need to be evaluated, but
11588 which are not useful to determine the result of the call. In
11589 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11590 COMPOUND_EXPR will be an argument which must be evaluated.
11591 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11592 COMPOUND_EXPR in the chain will contain the tree for the simplified
11593 form of the builtin function call. */
11595 static tree
11596 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11598 if (!validate_arg (s1, POINTER_TYPE)
11599 || !validate_arg (s2, POINTER_TYPE))
11600 return NULL_TREE;
11601 else
11603 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11605 /* If both arguments are constants, evaluate at compile-time. */
11606 if (p1 && p2)
11608 const size_t r = strcspn (p1, p2);
11609 return size_int (r);
11612 /* If the first argument is "", return NULL_TREE. */
11613 if (p1 && *p1 == '\0')
11615 /* Evaluate and ignore argument s2 in case it has
11616 side-effects. */
11617 return omit_one_operand_loc (loc, size_type_node,
11618 size_zero_node, s2);
11621 /* If the second argument is "", return __builtin_strlen(s1). */
11622 if (p2 && *p2 == '\0')
11624 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11626 /* If the replacement _DECL isn't initialized, don't do the
11627 transformation. */
11628 if (!fn)
11629 return NULL_TREE;
11631 return build_call_expr_loc (loc, fn, 1, s1);
11633 return NULL_TREE;
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */
11644 tree
11645 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11646 bool ignore, bool unlocked, tree len)
11648 /* If we're using an unlocked function, assume the other unlocked
11649 functions exist explicitly. */
11650 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11651 : implicit_built_in_decls[BUILT_IN_FPUTC];
11652 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11653 : implicit_built_in_decls[BUILT_IN_FWRITE];
11655 /* If the return value is used, don't do the transformation. */
11656 if (!ignore)
11657 return NULL_TREE;
11659 /* Verify the arguments in the original call. */
11660 if (!validate_arg (arg0, POINTER_TYPE)
11661 || !validate_arg (arg1, POINTER_TYPE))
11662 return NULL_TREE;
11664 if (! len)
11665 len = c_strlen (arg0, 0);
11667 /* Get the length of the string passed to fputs. If the length
11668 can't be determined, punt. */
11669 if (!len
11670 || TREE_CODE (len) != INTEGER_CST)
11671 return NULL_TREE;
11673 switch (compare_tree_int (len, 1))
11675 case -1: /* length is 0, delete the call entirely . */
11676 return omit_one_operand_loc (loc, integer_type_node,
11677 integer_zero_node, arg1);;
11679 case 0: /* length is 1, call fputc. */
11681 const char *p = c_getstr (arg0);
11683 if (p != NULL)
11685 if (fn_fputc)
11686 return build_call_expr_loc (loc, fn_fputc, 2,
11687 build_int_cst (NULL_TREE, p[0]), arg1);
11688 else
11689 return NULL_TREE;
11692 /* FALLTHROUGH */
11693 case 1: /* length is greater than 1, call fwrite. */
11695 /* If optimizing for size keep fputs. */
11696 if (optimize_function_for_size_p (cfun))
11697 return NULL_TREE;
11698 /* New argument list transforming fputs(string, stream) to
11699 fwrite(string, 1, len, stream). */
11700 if (fn_fwrite)
11701 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11702 size_one_node, len, arg1);
11703 else
11704 return NULL_TREE;
11706 default:
11707 gcc_unreachable ();
11709 return NULL_TREE;
11712 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11713 produced. False otherwise. This is done so that we don't output the error
11714 or warning twice or three times. */
11716 bool
11717 fold_builtin_next_arg (tree exp, bool va_start_p)
11719 tree fntype = TREE_TYPE (current_function_decl);
11720 int nargs = call_expr_nargs (exp);
11721 tree arg;
11723 if (!stdarg_p (fntype))
11725 error ("%<va_start%> used in function with fixed args");
11726 return true;
11729 if (va_start_p)
11731 if (va_start_p && (nargs != 2))
11733 error ("wrong number of arguments to function %<va_start%>");
11734 return true;
11736 arg = CALL_EXPR_ARG (exp, 1);
11738 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11739 when we checked the arguments and if needed issued a warning. */
11740 else
11742 if (nargs == 0)
11744 /* Evidently an out of date version of <stdarg.h>; can't validate
11745 va_start's second argument, but can still work as intended. */
11746 warning (0, "%<__builtin_next_arg%> called without an argument");
11747 return true;
11749 else if (nargs > 1)
11751 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11752 return true;
11754 arg = CALL_EXPR_ARG (exp, 0);
11757 if (TREE_CODE (arg) == SSA_NAME)
11758 arg = SSA_NAME_VAR (arg);
11760 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11761 or __builtin_next_arg (0) the first time we see it, after checking
11762 the arguments and if needed issuing a warning. */
11763 if (!integer_zerop (arg))
11765 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11767 /* Strip off all nops for the sake of the comparison. This
11768 is not quite the same as STRIP_NOPS. It does more.
11769 We must also strip off INDIRECT_EXPR for C++ reference
11770 parameters. */
11771 while (CONVERT_EXPR_P (arg)
11772 || TREE_CODE (arg) == INDIRECT_REF)
11773 arg = TREE_OPERAND (arg, 0);
11774 if (arg != last_parm)
11776 /* FIXME: Sometimes with the tree optimizers we can get the
11777 not the last argument even though the user used the last
11778 argument. We just warn and set the arg to be the last
11779 argument so that we will get wrong-code because of
11780 it. */
11781 warning (0, "second parameter of %<va_start%> not last named argument");
11784 /* Undefined by C99 7.15.1.4p4 (va_start):
11785 "If the parameter parmN is declared with the register storage
11786 class, with a function or array type, or with a type that is
11787 not compatible with the type that results after application of
11788 the default argument promotions, the behavior is undefined."
11790 else if (DECL_REGISTER (arg))
11791 warning (0, "undefined behaviour when second parameter of "
11792 "%<va_start%> is declared with %<register%> storage");
11794 /* We want to verify the second parameter just once before the tree
11795 optimizers are run and then avoid keeping it in the tree,
11796 as otherwise we could warn even for correct code like:
11797 void foo (int i, ...)
11798 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11799 if (va_start_p)
11800 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11801 else
11802 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11804 return false;
11808 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11809 ORIG may be null if this is a 2-argument call. We don't attempt to
11810 simplify calls with more than 3 arguments.
11812 Return NULL_TREE if no simplification was possible, otherwise return the
11813 simplified form of the call as a tree. If IGNORED is true, it means that
11814 the caller does not use the returned value of the function. */
11816 static tree
11817 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11818 tree orig, int ignored)
11820 tree call, retval;
11821 const char *fmt_str = NULL;
11823 /* Verify the required arguments in the original call. We deal with two
11824 types of sprintf() calls: 'sprintf (str, fmt)' and
11825 'sprintf (dest, "%s", orig)'. */
11826 if (!validate_arg (dest, POINTER_TYPE)
11827 || !validate_arg (fmt, POINTER_TYPE))
11828 return NULL_TREE;
11829 if (orig && !validate_arg (orig, POINTER_TYPE))
11830 return NULL_TREE;
11832 /* Check whether the format is a literal string constant. */
11833 fmt_str = c_getstr (fmt);
11834 if (fmt_str == NULL)
11835 return NULL_TREE;
11837 call = NULL_TREE;
11838 retval = NULL_TREE;
11840 if (!init_target_chars ())
11841 return NULL_TREE;
11843 /* If the format doesn't contain % args or %%, use strcpy. */
11844 if (strchr (fmt_str, target_percent) == NULL)
11846 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11848 if (!fn)
11849 return NULL_TREE;
11851 /* Don't optimize sprintf (buf, "abc", ptr++). */
11852 if (orig)
11853 return NULL_TREE;
11855 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11856 'format' is known to contain no % formats. */
11857 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11858 if (!ignored)
11859 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11862 /* If the format is "%s", use strcpy if the result isn't used. */
11863 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11865 tree fn;
11866 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11868 if (!fn)
11869 return NULL_TREE;
11871 /* Don't crash on sprintf (str1, "%s"). */
11872 if (!orig)
11873 return NULL_TREE;
11875 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11876 if (!ignored)
11878 retval = c_strlen (orig, 1);
11879 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11880 return NULL_TREE;
11882 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11885 if (call && retval)
11887 retval = fold_convert_loc
11888 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11889 retval);
11890 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11892 else
11893 return call;
11896 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11897 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11898 attempt to simplify calls with more than 4 arguments.
11900 Return NULL_TREE if no simplification was possible, otherwise return the
11901 simplified form of the call as a tree. If IGNORED is true, it means that
11902 the caller does not use the returned value of the function. */
11904 static tree
11905 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11906 tree orig, int ignored)
11908 tree call, retval;
11909 const char *fmt_str = NULL;
11910 unsigned HOST_WIDE_INT destlen;
11912 /* Verify the required arguments in the original call. We deal with two
11913 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11914 'snprintf (dest, cst, "%s", orig)'. */
11915 if (!validate_arg (dest, POINTER_TYPE)
11916 || !validate_arg (destsize, INTEGER_TYPE)
11917 || !validate_arg (fmt, POINTER_TYPE))
11918 return NULL_TREE;
11919 if (orig && !validate_arg (orig, POINTER_TYPE))
11920 return NULL_TREE;
11922 if (!host_integerp (destsize, 1))
11923 return NULL_TREE;
11925 /* Check whether the format is a literal string constant. */
11926 fmt_str = c_getstr (fmt);
11927 if (fmt_str == NULL)
11928 return NULL_TREE;
11930 call = NULL_TREE;
11931 retval = NULL_TREE;
11933 if (!init_target_chars ())
11934 return NULL_TREE;
11936 destlen = tree_low_cst (destsize, 1);
11938 /* If the format doesn't contain % args or %%, use strcpy. */
11939 if (strchr (fmt_str, target_percent) == NULL)
11941 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11942 size_t len = strlen (fmt_str);
11944 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11945 if (orig)
11946 return NULL_TREE;
11948 /* We could expand this as
11949 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11950 or to
11951 memcpy (str, fmt_with_nul_at_cstm1, cst);
11952 but in the former case that might increase code size
11953 and in the latter case grow .rodata section too much.
11954 So punt for now. */
11955 if (len >= destlen)
11956 return NULL_TREE;
11958 if (!fn)
11959 return NULL_TREE;
11961 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11962 'format' is known to contain no % formats and
11963 strlen (fmt) < cst. */
11964 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11966 if (!ignored)
11967 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11970 /* If the format is "%s", use strcpy if the result isn't used. */
11971 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11973 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11974 unsigned HOST_WIDE_INT origlen;
11976 /* Don't crash on snprintf (str1, cst, "%s"). */
11977 if (!orig)
11978 return NULL_TREE;
11980 retval = c_strlen (orig, 1);
11981 if (!retval || !host_integerp (retval, 1))
11982 return NULL_TREE;
11984 origlen = tree_low_cst (retval, 1);
11985 /* We could expand this as
11986 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11987 or to
11988 memcpy (str1, str2_with_nul_at_cstm1, cst);
11989 but in the former case that might increase code size
11990 and in the latter case grow .rodata section too much.
11991 So punt for now. */
11992 if (origlen >= destlen)
11993 return NULL_TREE;
11995 /* Convert snprintf (str1, cst, "%s", str2) into
11996 strcpy (str1, str2) if strlen (str2) < cst. */
11997 if (!fn)
11998 return NULL_TREE;
12000 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12002 if (ignored)
12003 retval = NULL_TREE;
12006 if (call && retval)
12008 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
12009 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12010 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12012 else
12013 return call;
12016 /* Expand a call EXP to __builtin_object_size. */
12019 expand_builtin_object_size (tree exp)
12021 tree ost;
12022 int object_size_type;
12023 tree fndecl = get_callee_fndecl (exp);
12025 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12027 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12028 exp, fndecl);
12029 expand_builtin_trap ();
12030 return const0_rtx;
12033 ost = CALL_EXPR_ARG (exp, 1);
12034 STRIP_NOPS (ost);
12036 if (TREE_CODE (ost) != INTEGER_CST
12037 || tree_int_cst_sgn (ost) < 0
12038 || compare_tree_int (ost, 3) > 0)
12040 error ("%Klast argument of %D is not integer constant between 0 and 3",
12041 exp, fndecl);
12042 expand_builtin_trap ();
12043 return const0_rtx;
12046 object_size_type = tree_low_cst (ost, 0);
12048 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12051 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12052 FCODE is the BUILT_IN_* to use.
12053 Return NULL_RTX if we failed; the caller should emit a normal call,
12054 otherwise try to get the result in TARGET, if convenient (and in
12055 mode MODE if that's convenient). */
12057 static rtx
12058 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12059 enum built_in_function fcode)
12061 tree dest, src, len, size;
12063 if (!validate_arglist (exp,
12064 POINTER_TYPE,
12065 fcode == BUILT_IN_MEMSET_CHK
12066 ? INTEGER_TYPE : POINTER_TYPE,
12067 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12068 return NULL_RTX;
12070 dest = CALL_EXPR_ARG (exp, 0);
12071 src = CALL_EXPR_ARG (exp, 1);
12072 len = CALL_EXPR_ARG (exp, 2);
12073 size = CALL_EXPR_ARG (exp, 3);
12075 if (! host_integerp (size, 1))
12076 return NULL_RTX;
12078 if (host_integerp (len, 1) || integer_all_onesp (size))
12080 tree fn;
12082 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12084 warning_at (tree_nonartificial_location (exp),
12085 0, "%Kcall to %D will always overflow destination buffer",
12086 exp, get_callee_fndecl (exp));
12087 return NULL_RTX;
12090 fn = NULL_TREE;
12091 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12092 mem{cpy,pcpy,move,set} is available. */
12093 switch (fcode)
12095 case BUILT_IN_MEMCPY_CHK:
12096 fn = built_in_decls[BUILT_IN_MEMCPY];
12097 break;
12098 case BUILT_IN_MEMPCPY_CHK:
12099 fn = built_in_decls[BUILT_IN_MEMPCPY];
12100 break;
12101 case BUILT_IN_MEMMOVE_CHK:
12102 fn = built_in_decls[BUILT_IN_MEMMOVE];
12103 break;
12104 case BUILT_IN_MEMSET_CHK:
12105 fn = built_in_decls[BUILT_IN_MEMSET];
12106 break;
12107 default:
12108 break;
12111 if (! fn)
12112 return NULL_RTX;
12114 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12115 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12116 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12117 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12119 else if (fcode == BUILT_IN_MEMSET_CHK)
12120 return NULL_RTX;
12121 else
12123 unsigned int dest_align
12124 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12126 /* If DEST is not a pointer type, call the normal function. */
12127 if (dest_align == 0)
12128 return NULL_RTX;
12130 /* If SRC and DEST are the same (and not volatile), do nothing. */
12131 if (operand_equal_p (src, dest, 0))
12133 tree expr;
12135 if (fcode != BUILT_IN_MEMPCPY_CHK)
12137 /* Evaluate and ignore LEN in case it has side-effects. */
12138 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12139 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12142 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12143 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12146 /* __memmove_chk special case. */
12147 if (fcode == BUILT_IN_MEMMOVE_CHK)
12149 unsigned int src_align
12150 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12152 if (src_align == 0)
12153 return NULL_RTX;
12155 /* If src is categorized for a readonly section we can use
12156 normal __memcpy_chk. */
12157 if (readonly_data_expr (src))
12159 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12160 if (!fn)
12161 return NULL_RTX;
12162 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12163 dest, src, len, size);
12164 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12165 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12166 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12169 return NULL_RTX;
12173 /* Emit warning if a buffer overflow is detected at compile time. */
12175 static void
12176 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12178 int is_strlen = 0;
12179 tree len, size;
12180 location_t loc = tree_nonartificial_location (exp);
12182 switch (fcode)
12184 case BUILT_IN_STRCPY_CHK:
12185 case BUILT_IN_STPCPY_CHK:
12186 /* For __strcat_chk the warning will be emitted only if overflowing
12187 by at least strlen (dest) + 1 bytes. */
12188 case BUILT_IN_STRCAT_CHK:
12189 len = CALL_EXPR_ARG (exp, 1);
12190 size = CALL_EXPR_ARG (exp, 2);
12191 is_strlen = 1;
12192 break;
12193 case BUILT_IN_STRNCAT_CHK:
12194 case BUILT_IN_STRNCPY_CHK:
12195 len = CALL_EXPR_ARG (exp, 2);
12196 size = CALL_EXPR_ARG (exp, 3);
12197 break;
12198 case BUILT_IN_SNPRINTF_CHK:
12199 case BUILT_IN_VSNPRINTF_CHK:
12200 len = CALL_EXPR_ARG (exp, 1);
12201 size = CALL_EXPR_ARG (exp, 3);
12202 break;
12203 default:
12204 gcc_unreachable ();
12207 if (!len || !size)
12208 return;
12210 if (! host_integerp (size, 1) || integer_all_onesp (size))
12211 return;
12213 if (is_strlen)
12215 len = c_strlen (len, 1);
12216 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12217 return;
12219 else if (fcode == BUILT_IN_STRNCAT_CHK)
12221 tree src = CALL_EXPR_ARG (exp, 1);
12222 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12223 return;
12224 src = c_strlen (src, 1);
12225 if (! src || ! host_integerp (src, 1))
12227 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12228 exp, get_callee_fndecl (exp));
12229 return;
12231 else if (tree_int_cst_lt (src, size))
12232 return;
12234 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12235 return;
12237 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12238 exp, get_callee_fndecl (exp));
12241 /* Emit warning if a buffer overflow is detected at compile time
12242 in __sprintf_chk/__vsprintf_chk calls. */
12244 static void
12245 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12247 tree size, len, fmt;
12248 const char *fmt_str;
12249 int nargs = call_expr_nargs (exp);
12251 /* Verify the required arguments in the original call. */
12253 if (nargs < 4)
12254 return;
12255 size = CALL_EXPR_ARG (exp, 2);
12256 fmt = CALL_EXPR_ARG (exp, 3);
12258 if (! host_integerp (size, 1) || integer_all_onesp (size))
12259 return;
12261 /* Check whether the format is a literal string constant. */
12262 fmt_str = c_getstr (fmt);
12263 if (fmt_str == NULL)
12264 return;
12266 if (!init_target_chars ())
12267 return;
12269 /* If the format doesn't contain % args or %%, we know its size. */
12270 if (strchr (fmt_str, target_percent) == 0)
12271 len = build_int_cstu (size_type_node, strlen (fmt_str));
12272 /* If the format is "%s" and first ... argument is a string literal,
12273 we know it too. */
12274 else if (fcode == BUILT_IN_SPRINTF_CHK
12275 && strcmp (fmt_str, target_percent_s) == 0)
12277 tree arg;
12279 if (nargs < 5)
12280 return;
12281 arg = CALL_EXPR_ARG (exp, 4);
12282 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12283 return;
12285 len = c_strlen (arg, 1);
12286 if (!len || ! host_integerp (len, 1))
12287 return;
12289 else
12290 return;
12292 if (! tree_int_cst_lt (len, size))
12293 warning_at (tree_nonartificial_location (exp),
12294 0, "%Kcall to %D will always overflow destination buffer",
12295 exp, get_callee_fndecl (exp));
12298 /* Emit warning if a free is called with address of a variable. */
12300 static void
12301 maybe_emit_free_warning (tree exp)
12303 tree arg = CALL_EXPR_ARG (exp, 0);
12305 STRIP_NOPS (arg);
12306 if (TREE_CODE (arg) != ADDR_EXPR)
12307 return;
12309 arg = get_base_address (TREE_OPERAND (arg, 0));
12310 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12311 return;
12313 if (SSA_VAR_P (arg))
12314 warning_at (tree_nonartificial_location (exp),
12315 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12316 else
12317 warning_at (tree_nonartificial_location (exp),
12318 0, "%Kattempt to free a non-heap object", exp);
12321 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12322 if possible. */
12324 tree
12325 fold_builtin_object_size (tree ptr, tree ost)
12327 unsigned HOST_WIDE_INT bytes;
12328 int object_size_type;
12330 if (!validate_arg (ptr, POINTER_TYPE)
12331 || !validate_arg (ost, INTEGER_TYPE))
12332 return NULL_TREE;
12334 STRIP_NOPS (ost);
12336 if (TREE_CODE (ost) != INTEGER_CST
12337 || tree_int_cst_sgn (ost) < 0
12338 || compare_tree_int (ost, 3) > 0)
12339 return NULL_TREE;
12341 object_size_type = tree_low_cst (ost, 0);
12343 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12344 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12345 and (size_t) 0 for types 2 and 3. */
12346 if (TREE_SIDE_EFFECTS (ptr))
12347 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12349 if (TREE_CODE (ptr) == ADDR_EXPR)
12351 bytes = compute_builtin_object_size (ptr, object_size_type);
12352 if (double_int_fits_to_tree_p (size_type_node,
12353 uhwi_to_double_int (bytes)))
12354 return build_int_cstu (size_type_node, bytes);
12356 else if (TREE_CODE (ptr) == SSA_NAME)
12358 /* If object size is not known yet, delay folding until
12359 later. Maybe subsequent passes will help determining
12360 it. */
12361 bytes = compute_builtin_object_size (ptr, object_size_type);
12362 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12363 && double_int_fits_to_tree_p (size_type_node,
12364 uhwi_to_double_int (bytes)))
12365 return build_int_cstu (size_type_node, bytes);
12368 return NULL_TREE;
12371 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12372 DEST, SRC, LEN, and SIZE are the arguments to the call.
12373 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12374 code of the builtin. If MAXLEN is not NULL, it is maximum length
12375 passed as third argument. */
12377 tree
12378 fold_builtin_memory_chk (location_t loc, tree fndecl,
12379 tree dest, tree src, tree len, tree size,
12380 tree maxlen, bool ignore,
12381 enum built_in_function fcode)
12383 tree fn;
12385 if (!validate_arg (dest, POINTER_TYPE)
12386 || !validate_arg (src,
12387 (fcode == BUILT_IN_MEMSET_CHK
12388 ? INTEGER_TYPE : POINTER_TYPE))
12389 || !validate_arg (len, INTEGER_TYPE)
12390 || !validate_arg (size, INTEGER_TYPE))
12391 return NULL_TREE;
12393 /* If SRC and DEST are the same (and not volatile), return DEST
12394 (resp. DEST+LEN for __mempcpy_chk). */
12395 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12397 if (fcode != BUILT_IN_MEMPCPY_CHK)
12398 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12399 dest, len);
12400 else
12402 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12403 dest, len);
12404 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12408 if (! host_integerp (size, 1))
12409 return NULL_TREE;
12411 if (! integer_all_onesp (size))
12413 if (! host_integerp (len, 1))
12415 /* If LEN is not constant, try MAXLEN too.
12416 For MAXLEN only allow optimizing into non-_ocs function
12417 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12418 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12420 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12422 /* (void) __mempcpy_chk () can be optimized into
12423 (void) __memcpy_chk (). */
12424 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12425 if (!fn)
12426 return NULL_TREE;
12428 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12430 return NULL_TREE;
12433 else
12434 maxlen = len;
12436 if (tree_int_cst_lt (size, maxlen))
12437 return NULL_TREE;
12440 fn = NULL_TREE;
12441 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12442 mem{cpy,pcpy,move,set} is available. */
12443 switch (fcode)
12445 case BUILT_IN_MEMCPY_CHK:
12446 fn = built_in_decls[BUILT_IN_MEMCPY];
12447 break;
12448 case BUILT_IN_MEMPCPY_CHK:
12449 fn = built_in_decls[BUILT_IN_MEMPCPY];
12450 break;
12451 case BUILT_IN_MEMMOVE_CHK:
12452 fn = built_in_decls[BUILT_IN_MEMMOVE];
12453 break;
12454 case BUILT_IN_MEMSET_CHK:
12455 fn = built_in_decls[BUILT_IN_MEMSET];
12456 break;
12457 default:
12458 break;
12461 if (!fn)
12462 return NULL_TREE;
12464 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12467 /* Fold a call to the __st[rp]cpy_chk builtin.
12468 DEST, SRC, and SIZE are the arguments to the call.
12469 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12470 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12471 strings passed as second argument. */
12473 tree
12474 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12475 tree src, tree size,
12476 tree maxlen, bool ignore,
12477 enum built_in_function fcode)
12479 tree len, fn;
12481 if (!validate_arg (dest, POINTER_TYPE)
12482 || !validate_arg (src, POINTER_TYPE)
12483 || !validate_arg (size, INTEGER_TYPE))
12484 return NULL_TREE;
12486 /* If SRC and DEST are the same (and not volatile), return DEST. */
12487 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12488 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12490 if (! host_integerp (size, 1))
12491 return NULL_TREE;
12493 if (! integer_all_onesp (size))
12495 len = c_strlen (src, 1);
12496 if (! len || ! host_integerp (len, 1))
12498 /* If LEN is not constant, try MAXLEN too.
12499 For MAXLEN only allow optimizing into non-_ocs function
12500 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12501 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12503 if (fcode == BUILT_IN_STPCPY_CHK)
12505 if (! ignore)
12506 return NULL_TREE;
12508 /* If return value of __stpcpy_chk is ignored,
12509 optimize into __strcpy_chk. */
12510 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12511 if (!fn)
12512 return NULL_TREE;
12514 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12517 if (! len || TREE_SIDE_EFFECTS (len))
12518 return NULL_TREE;
12520 /* If c_strlen returned something, but not a constant,
12521 transform __strcpy_chk into __memcpy_chk. */
12522 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12523 if (!fn)
12524 return NULL_TREE;
12526 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12527 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12528 build_call_expr_loc (loc, fn, 4,
12529 dest, src, len, size));
12532 else
12533 maxlen = len;
12535 if (! tree_int_cst_lt (maxlen, size))
12536 return NULL_TREE;
12539 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12540 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12541 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12542 if (!fn)
12543 return NULL_TREE;
12545 return build_call_expr_loc (loc, fn, 2, dest, src);
12548 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12549 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12550 length passed as third argument. */
12552 tree
12553 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12554 tree len, tree size, tree maxlen)
12556 tree fn;
12558 if (!validate_arg (dest, POINTER_TYPE)
12559 || !validate_arg (src, POINTER_TYPE)
12560 || !validate_arg (len, INTEGER_TYPE)
12561 || !validate_arg (size, INTEGER_TYPE))
12562 return NULL_TREE;
12564 if (! host_integerp (size, 1))
12565 return NULL_TREE;
12567 if (! integer_all_onesp (size))
12569 if (! host_integerp (len, 1))
12571 /* If LEN is not constant, try MAXLEN too.
12572 For MAXLEN only allow optimizing into non-_ocs function
12573 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12574 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12575 return NULL_TREE;
12577 else
12578 maxlen = len;
12580 if (tree_int_cst_lt (size, maxlen))
12581 return NULL_TREE;
12584 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12585 fn = built_in_decls[BUILT_IN_STRNCPY];
12586 if (!fn)
12587 return NULL_TREE;
12589 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12592 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12593 are the arguments to the call. */
12595 static tree
12596 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12597 tree src, tree size)
12599 tree fn;
12600 const char *p;
12602 if (!validate_arg (dest, POINTER_TYPE)
12603 || !validate_arg (src, POINTER_TYPE)
12604 || !validate_arg (size, INTEGER_TYPE))
12605 return NULL_TREE;
12607 p = c_getstr (src);
12608 /* If the SRC parameter is "", return DEST. */
12609 if (p && *p == '\0')
12610 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12612 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12613 return NULL_TREE;
12615 /* If __builtin_strcat_chk is used, assume strcat is available. */
12616 fn = built_in_decls[BUILT_IN_STRCAT];
12617 if (!fn)
12618 return NULL_TREE;
12620 return build_call_expr_loc (loc, fn, 2, dest, src);
12623 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12624 LEN, and SIZE. */
12626 static tree
12627 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12628 tree dest, tree src, tree len, tree size)
12630 tree fn;
12631 const char *p;
12633 if (!validate_arg (dest, POINTER_TYPE)
12634 || !validate_arg (src, POINTER_TYPE)
12635 || !validate_arg (size, INTEGER_TYPE)
12636 || !validate_arg (size, INTEGER_TYPE))
12637 return NULL_TREE;
12639 p = c_getstr (src);
12640 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12641 if (p && *p == '\0')
12642 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12643 else if (integer_zerop (len))
12644 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12646 if (! host_integerp (size, 1))
12647 return NULL_TREE;
12649 if (! integer_all_onesp (size))
12651 tree src_len = c_strlen (src, 1);
12652 if (src_len
12653 && host_integerp (src_len, 1)
12654 && host_integerp (len, 1)
12655 && ! tree_int_cst_lt (len, src_len))
12657 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12658 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12659 if (!fn)
12660 return NULL_TREE;
12662 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12664 return NULL_TREE;
12667 /* If __builtin_strncat_chk is used, assume strncat is available. */
12668 fn = built_in_decls[BUILT_IN_STRNCAT];
12669 if (!fn)
12670 return NULL_TREE;
12672 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12675 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12676 Return NULL_TREE if a normal call should be emitted rather than
12677 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12678 or BUILT_IN_VSPRINTF_CHK. */
12680 static tree
12681 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12682 enum built_in_function fcode)
12684 tree dest, size, len, fn, fmt, flag;
12685 const char *fmt_str;
12687 /* Verify the required arguments in the original call. */
12688 if (nargs < 4)
12689 return NULL_TREE;
12690 dest = args[0];
12691 if (!validate_arg (dest, POINTER_TYPE))
12692 return NULL_TREE;
12693 flag = args[1];
12694 if (!validate_arg (flag, INTEGER_TYPE))
12695 return NULL_TREE;
12696 size = args[2];
12697 if (!validate_arg (size, INTEGER_TYPE))
12698 return NULL_TREE;
12699 fmt = args[3];
12700 if (!validate_arg (fmt, POINTER_TYPE))
12701 return NULL_TREE;
12703 if (! host_integerp (size, 1))
12704 return NULL_TREE;
12706 len = NULL_TREE;
12708 if (!init_target_chars ())
12709 return NULL_TREE;
12711 /* Check whether the format is a literal string constant. */
12712 fmt_str = c_getstr (fmt);
12713 if (fmt_str != NULL)
12715 /* If the format doesn't contain % args or %%, we know the size. */
12716 if (strchr (fmt_str, target_percent) == 0)
12718 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12719 len = build_int_cstu (size_type_node, strlen (fmt_str));
12721 /* If the format is "%s" and first ... argument is a string literal,
12722 we know the size too. */
12723 else if (fcode == BUILT_IN_SPRINTF_CHK
12724 && strcmp (fmt_str, target_percent_s) == 0)
12726 tree arg;
12728 if (nargs == 5)
12730 arg = args[4];
12731 if (validate_arg (arg, POINTER_TYPE))
12733 len = c_strlen (arg, 1);
12734 if (! len || ! host_integerp (len, 1))
12735 len = NULL_TREE;
12741 if (! integer_all_onesp (size))
12743 if (! len || ! tree_int_cst_lt (len, size))
12744 return NULL_TREE;
12747 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12748 or if format doesn't contain % chars or is "%s". */
12749 if (! integer_zerop (flag))
12751 if (fmt_str == NULL)
12752 return NULL_TREE;
12753 if (strchr (fmt_str, target_percent) != NULL
12754 && strcmp (fmt_str, target_percent_s))
12755 return NULL_TREE;
12758 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12759 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12760 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12761 if (!fn)
12762 return NULL_TREE;
12764 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12767 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12768 a normal call should be emitted rather than expanding the function
12769 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12771 static tree
12772 fold_builtin_sprintf_chk (location_t loc, tree exp,
12773 enum built_in_function fcode)
12775 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12776 CALL_EXPR_ARGP (exp), fcode);
12779 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12780 NULL_TREE if a normal call should be emitted rather than expanding
12781 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12782 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12783 passed as second argument. */
12785 static tree
12786 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12787 tree maxlen, enum built_in_function fcode)
12789 tree dest, size, len, fn, fmt, flag;
12790 const char *fmt_str;
12792 /* Verify the required arguments in the original call. */
12793 if (nargs < 5)
12794 return NULL_TREE;
12795 dest = args[0];
12796 if (!validate_arg (dest, POINTER_TYPE))
12797 return NULL_TREE;
12798 len = args[1];
12799 if (!validate_arg (len, INTEGER_TYPE))
12800 return NULL_TREE;
12801 flag = args[2];
12802 if (!validate_arg (flag, INTEGER_TYPE))
12803 return NULL_TREE;
12804 size = args[3];
12805 if (!validate_arg (size, INTEGER_TYPE))
12806 return NULL_TREE;
12807 fmt = args[4];
12808 if (!validate_arg (fmt, POINTER_TYPE))
12809 return NULL_TREE;
12811 if (! host_integerp (size, 1))
12812 return NULL_TREE;
12814 if (! integer_all_onesp (size))
12816 if (! host_integerp (len, 1))
12818 /* If LEN is not constant, try MAXLEN too.
12819 For MAXLEN only allow optimizing into non-_ocs function
12820 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12821 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12822 return NULL_TREE;
12824 else
12825 maxlen = len;
12827 if (tree_int_cst_lt (size, maxlen))
12828 return NULL_TREE;
12831 if (!init_target_chars ())
12832 return NULL_TREE;
12834 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12835 or if format doesn't contain % chars or is "%s". */
12836 if (! integer_zerop (flag))
12838 fmt_str = c_getstr (fmt);
12839 if (fmt_str == NULL)
12840 return NULL_TREE;
12841 if (strchr (fmt_str, target_percent) != NULL
12842 && strcmp (fmt_str, target_percent_s))
12843 return NULL_TREE;
12846 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12847 available. */
12848 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12849 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12850 if (!fn)
12851 return NULL_TREE;
12853 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12856 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12857 a normal call should be emitted rather than expanding the function
12858 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12859 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12860 passed as second argument. */
12862 tree
12863 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12864 enum built_in_function fcode)
12866 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12867 CALL_EXPR_ARGP (exp), maxlen, fcode);
12870 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12871 FMT and ARG are the arguments to the call; we don't fold cases with
12872 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12874 Return NULL_TREE if no simplification was possible, otherwise return the
12875 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12876 code of the function to be simplified. */
12878 static tree
12879 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12880 tree arg, bool ignore,
12881 enum built_in_function fcode)
12883 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12884 const char *fmt_str = NULL;
12886 /* If the return value is used, don't do the transformation. */
12887 if (! ignore)
12888 return NULL_TREE;
12890 /* Verify the required arguments in the original call. */
12891 if (!validate_arg (fmt, POINTER_TYPE))
12892 return NULL_TREE;
12894 /* Check whether the format is a literal string constant. */
12895 fmt_str = c_getstr (fmt);
12896 if (fmt_str == NULL)
12897 return NULL_TREE;
12899 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12901 /* If we're using an unlocked function, assume the other
12902 unlocked functions exist explicitly. */
12903 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12904 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12906 else
12908 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12909 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12912 if (!init_target_chars ())
12913 return NULL_TREE;
12915 if (strcmp (fmt_str, target_percent_s) == 0
12916 || strchr (fmt_str, target_percent) == NULL)
12918 const char *str;
12920 if (strcmp (fmt_str, target_percent_s) == 0)
12922 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12923 return NULL_TREE;
12925 if (!arg || !validate_arg (arg, POINTER_TYPE))
12926 return NULL_TREE;
12928 str = c_getstr (arg);
12929 if (str == NULL)
12930 return NULL_TREE;
12932 else
12934 /* The format specifier doesn't contain any '%' characters. */
12935 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12936 && arg)
12937 return NULL_TREE;
12938 str = fmt_str;
12941 /* If the string was "", printf does nothing. */
12942 if (str[0] == '\0')
12943 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12945 /* If the string has length of 1, call putchar. */
12946 if (str[1] == '\0')
12948 /* Given printf("c"), (where c is any one character,)
12949 convert "c"[0] to an int and pass that to the replacement
12950 function. */
12951 newarg = build_int_cst (NULL_TREE, str[0]);
12952 if (fn_putchar)
12953 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12955 else
12957 /* If the string was "string\n", call puts("string"). */
12958 size_t len = strlen (str);
12959 if ((unsigned char)str[len - 1] == target_newline
12960 && (size_t) (int) len == len
12961 && (int) len > 0)
12963 char *newstr;
12964 tree offset_node, string_cst;
12966 /* Create a NUL-terminated string that's one char shorter
12967 than the original, stripping off the trailing '\n'. */
12968 newarg = build_string_literal (len, str);
12969 string_cst = string_constant (newarg, &offset_node);
12970 gcc_checking_assert (string_cst
12971 && (TREE_STRING_LENGTH (string_cst)
12972 == (int) len)
12973 && integer_zerop (offset_node)
12974 && (unsigned char)
12975 TREE_STRING_POINTER (string_cst)[len - 1]
12976 == target_newline);
12977 /* build_string_literal creates a new STRING_CST,
12978 modify it in place to avoid double copying. */
12979 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12980 newstr[len - 1] = '\0';
12981 if (fn_puts)
12982 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12984 else
12985 /* We'd like to arrange to call fputs(string,stdout) here,
12986 but we need stdout and don't have a way to get it yet. */
12987 return NULL_TREE;
12991 /* The other optimizations can be done only on the non-va_list variants. */
12992 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12993 return NULL_TREE;
12995 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12996 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12998 if (!arg || !validate_arg (arg, POINTER_TYPE))
12999 return NULL_TREE;
13000 if (fn_puts)
13001 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13004 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13005 else if (strcmp (fmt_str, target_percent_c) == 0)
13007 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13008 return NULL_TREE;
13009 if (fn_putchar)
13010 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13013 if (!call)
13014 return NULL_TREE;
13016 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13019 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
13020 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13021 more than 3 arguments, and ARG may be null in the 2-argument case.
13023 Return NULL_TREE if no simplification was possible, otherwise return the
13024 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13025 code of the function to be simplified. */
13027 static tree
13028 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13029 tree fmt, tree arg, bool ignore,
13030 enum built_in_function fcode)
13032 tree fn_fputc, fn_fputs, call = NULL_TREE;
13033 const char *fmt_str = NULL;
13035 /* If the return value is used, don't do the transformation. */
13036 if (! ignore)
13037 return NULL_TREE;
13039 /* Verify the required arguments in the original call. */
13040 if (!validate_arg (fp, POINTER_TYPE))
13041 return NULL_TREE;
13042 if (!validate_arg (fmt, POINTER_TYPE))
13043 return NULL_TREE;
13045 /* Check whether the format is a literal string constant. */
13046 fmt_str = c_getstr (fmt);
13047 if (fmt_str == NULL)
13048 return NULL_TREE;
13050 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13052 /* If we're using an unlocked function, assume the other
13053 unlocked functions exist explicitly. */
13054 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
13055 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
13057 else
13059 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
13060 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
13063 if (!init_target_chars ())
13064 return NULL_TREE;
13066 /* If the format doesn't contain % args or %%, use strcpy. */
13067 if (strchr (fmt_str, target_percent) == NULL)
13069 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13070 && arg)
13071 return NULL_TREE;
13073 /* If the format specifier was "", fprintf does nothing. */
13074 if (fmt_str[0] == '\0')
13076 /* If FP has side-effects, just wait until gimplification is
13077 done. */
13078 if (TREE_SIDE_EFFECTS (fp))
13079 return NULL_TREE;
13081 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13084 /* When "string" doesn't contain %, replace all cases of
13085 fprintf (fp, string) with fputs (string, fp). The fputs
13086 builtin will take care of special cases like length == 1. */
13087 if (fn_fputs)
13088 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13091 /* The other optimizations can be done only on the non-va_list variants. */
13092 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13093 return NULL_TREE;
13095 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13096 else if (strcmp (fmt_str, target_percent_s) == 0)
13098 if (!arg || !validate_arg (arg, POINTER_TYPE))
13099 return NULL_TREE;
13100 if (fn_fputs)
13101 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13104 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13105 else if (strcmp (fmt_str, target_percent_c) == 0)
13107 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13108 return NULL_TREE;
13109 if (fn_fputc)
13110 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13113 if (!call)
13114 return NULL_TREE;
13115 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13118 /* Initialize format string characters in the target charset. */
13120 static bool
13121 init_target_chars (void)
13123 static bool init;
13124 if (!init)
13126 target_newline = lang_hooks.to_target_charset ('\n');
13127 target_percent = lang_hooks.to_target_charset ('%');
13128 target_c = lang_hooks.to_target_charset ('c');
13129 target_s = lang_hooks.to_target_charset ('s');
13130 if (target_newline == 0 || target_percent == 0 || target_c == 0
13131 || target_s == 0)
13132 return false;
13134 target_percent_c[0] = target_percent;
13135 target_percent_c[1] = target_c;
13136 target_percent_c[2] = '\0';
13138 target_percent_s[0] = target_percent;
13139 target_percent_s[1] = target_s;
13140 target_percent_s[2] = '\0';
13142 target_percent_s_newline[0] = target_percent;
13143 target_percent_s_newline[1] = target_s;
13144 target_percent_s_newline[2] = target_newline;
13145 target_percent_s_newline[3] = '\0';
13147 init = true;
13149 return true;
13152 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13153 and no overflow/underflow occurred. INEXACT is true if M was not
13154 exactly calculated. TYPE is the tree type for the result. This
13155 function assumes that you cleared the MPFR flags and then
13156 calculated M to see if anything subsequently set a flag prior to
13157 entering this function. Return NULL_TREE if any checks fail. */
13159 static tree
13160 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13162 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13163 overflow/underflow occurred. If -frounding-math, proceed iff the
13164 result of calling FUNC was exact. */
13165 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13166 && (!flag_rounding_math || !inexact))
13168 REAL_VALUE_TYPE rr;
13170 real_from_mpfr (&rr, m, type, GMP_RNDN);
13171 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13172 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13173 but the mpft_t is not, then we underflowed in the
13174 conversion. */
13175 if (real_isfinite (&rr)
13176 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13178 REAL_VALUE_TYPE rmode;
13180 real_convert (&rmode, TYPE_MODE (type), &rr);
13181 /* Proceed iff the specified mode can hold the value. */
13182 if (real_identical (&rmode, &rr))
13183 return build_real (type, rmode);
13186 return NULL_TREE;
13189 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13190 number and no overflow/underflow occurred. INEXACT is true if M
13191 was not exactly calculated. TYPE is the tree type for the result.
13192 This function assumes that you cleared the MPFR flags and then
13193 calculated M to see if anything subsequently set a flag prior to
13194 entering this function. Return NULL_TREE if any checks fail, if
13195 FORCE_CONVERT is true, then bypass the checks. */
13197 static tree
13198 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13200 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13201 overflow/underflow occurred. If -frounding-math, proceed iff the
13202 result of calling FUNC was exact. */
13203 if (force_convert
13204 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13205 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13206 && (!flag_rounding_math || !inexact)))
13208 REAL_VALUE_TYPE re, im;
13210 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13211 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13212 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13213 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13214 but the mpft_t is not, then we underflowed in the
13215 conversion. */
13216 if (force_convert
13217 || (real_isfinite (&re) && real_isfinite (&im)
13218 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13219 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13221 REAL_VALUE_TYPE re_mode, im_mode;
13223 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13224 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13225 /* Proceed iff the specified mode can hold the value. */
13226 if (force_convert
13227 || (real_identical (&re_mode, &re)
13228 && real_identical (&im_mode, &im)))
13229 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13230 build_real (TREE_TYPE (type), im_mode));
13233 return NULL_TREE;
13236 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13237 FUNC on it and return the resulting value as a tree with type TYPE.
13238 If MIN and/or MAX are not NULL, then the supplied ARG must be
13239 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13240 acceptable values, otherwise they are not. The mpfr precision is
13241 set to the precision of TYPE. We assume that function FUNC returns
13242 zero if the result could be calculated exactly within the requested
13243 precision. */
13245 static tree
13246 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13247 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13248 bool inclusive)
13250 tree result = NULL_TREE;
13252 STRIP_NOPS (arg);
13254 /* To proceed, MPFR must exactly represent the target floating point
13255 format, which only happens when the target base equals two. */
13256 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13257 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13259 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13261 if (real_isfinite (ra)
13262 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13263 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13265 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13266 const int prec = fmt->p;
13267 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13268 int inexact;
13269 mpfr_t m;
13271 mpfr_init2 (m, prec);
13272 mpfr_from_real (m, ra, GMP_RNDN);
13273 mpfr_clear_flags ();
13274 inexact = func (m, m, rnd);
13275 result = do_mpfr_ckconv (m, type, inexact);
13276 mpfr_clear (m);
13280 return result;
13283 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13284 FUNC on it and return the resulting value as a tree with type TYPE.
13285 The mpfr precision is set to the precision of TYPE. We assume that
13286 function FUNC returns zero if the result could be calculated
13287 exactly within the requested precision. */
13289 static tree
13290 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13291 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13293 tree result = NULL_TREE;
13295 STRIP_NOPS (arg1);
13296 STRIP_NOPS (arg2);
13298 /* To proceed, MPFR must exactly represent the target floating point
13299 format, which only happens when the target base equals two. */
13300 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13301 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13302 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13304 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13305 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13307 if (real_isfinite (ra1) && real_isfinite (ra2))
13309 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13310 const int prec = fmt->p;
13311 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13312 int inexact;
13313 mpfr_t m1, m2;
13315 mpfr_inits2 (prec, m1, m2, NULL);
13316 mpfr_from_real (m1, ra1, GMP_RNDN);
13317 mpfr_from_real (m2, ra2, GMP_RNDN);
13318 mpfr_clear_flags ();
13319 inexact = func (m1, m1, m2, rnd);
13320 result = do_mpfr_ckconv (m1, type, inexact);
13321 mpfr_clears (m1, m2, NULL);
13325 return result;
13328 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13329 FUNC on it and return the resulting value as a tree with type TYPE.
13330 The mpfr precision is set to the precision of TYPE. We assume that
13331 function FUNC returns zero if the result could be calculated
13332 exactly within the requested precision. */
13334 static tree
13335 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13336 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13338 tree result = NULL_TREE;
13340 STRIP_NOPS (arg1);
13341 STRIP_NOPS (arg2);
13342 STRIP_NOPS (arg3);
13344 /* To proceed, MPFR must exactly represent the target floating point
13345 format, which only happens when the target base equals two. */
13346 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13347 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13348 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13349 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13351 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13352 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13353 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13355 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13357 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13358 const int prec = fmt->p;
13359 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13360 int inexact;
13361 mpfr_t m1, m2, m3;
13363 mpfr_inits2 (prec, m1, m2, m3, NULL);
13364 mpfr_from_real (m1, ra1, GMP_RNDN);
13365 mpfr_from_real (m2, ra2, GMP_RNDN);
13366 mpfr_from_real (m3, ra3, GMP_RNDN);
13367 mpfr_clear_flags ();
13368 inexact = func (m1, m1, m2, m3, rnd);
13369 result = do_mpfr_ckconv (m1, type, inexact);
13370 mpfr_clears (m1, m2, m3, NULL);
13374 return result;
13377 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13378 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13379 If ARG_SINP and ARG_COSP are NULL then the result is returned
13380 as a complex value.
13381 The type is taken from the type of ARG and is used for setting the
13382 precision of the calculation and results. */
13384 static tree
13385 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13387 tree const type = TREE_TYPE (arg);
13388 tree result = NULL_TREE;
13390 STRIP_NOPS (arg);
13392 /* To proceed, MPFR must exactly represent the target floating point
13393 format, which only happens when the target base equals two. */
13394 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13395 && TREE_CODE (arg) == REAL_CST
13396 && !TREE_OVERFLOW (arg))
13398 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13400 if (real_isfinite (ra))
13402 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13403 const int prec = fmt->p;
13404 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13405 tree result_s, result_c;
13406 int inexact;
13407 mpfr_t m, ms, mc;
13409 mpfr_inits2 (prec, m, ms, mc, NULL);
13410 mpfr_from_real (m, ra, GMP_RNDN);
13411 mpfr_clear_flags ();
13412 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13413 result_s = do_mpfr_ckconv (ms, type, inexact);
13414 result_c = do_mpfr_ckconv (mc, type, inexact);
13415 mpfr_clears (m, ms, mc, NULL);
13416 if (result_s && result_c)
13418 /* If we are to return in a complex value do so. */
13419 if (!arg_sinp && !arg_cosp)
13420 return build_complex (build_complex_type (type),
13421 result_c, result_s);
13423 /* Dereference the sin/cos pointer arguments. */
13424 arg_sinp = build_fold_indirect_ref (arg_sinp);
13425 arg_cosp = build_fold_indirect_ref (arg_cosp);
13426 /* Proceed if valid pointer type were passed in. */
13427 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13428 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13430 /* Set the values. */
13431 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13432 result_s);
13433 TREE_SIDE_EFFECTS (result_s) = 1;
13434 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13435 result_c);
13436 TREE_SIDE_EFFECTS (result_c) = 1;
13437 /* Combine the assignments into a compound expr. */
13438 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13439 result_s, result_c));
13444 return result;
13447 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13448 two-argument mpfr order N Bessel function FUNC on them and return
13449 the resulting value as a tree with type TYPE. The mpfr precision
13450 is set to the precision of TYPE. We assume that function FUNC
13451 returns zero if the result could be calculated exactly within the
13452 requested precision. */
13453 static tree
13454 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13455 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13456 const REAL_VALUE_TYPE *min, bool inclusive)
13458 tree result = NULL_TREE;
13460 STRIP_NOPS (arg1);
13461 STRIP_NOPS (arg2);
13463 /* To proceed, MPFR must exactly represent the target floating point
13464 format, which only happens when the target base equals two. */
13465 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13466 && host_integerp (arg1, 0)
13467 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13469 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13470 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13472 if (n == (long)n
13473 && real_isfinite (ra)
13474 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13476 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13477 const int prec = fmt->p;
13478 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13479 int inexact;
13480 mpfr_t m;
13482 mpfr_init2 (m, prec);
13483 mpfr_from_real (m, ra, GMP_RNDN);
13484 mpfr_clear_flags ();
13485 inexact = func (m, n, m, rnd);
13486 result = do_mpfr_ckconv (m, type, inexact);
13487 mpfr_clear (m);
13491 return result;
13494 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13495 the pointer *(ARG_QUO) and return the result. The type is taken
13496 from the type of ARG0 and is used for setting the precision of the
13497 calculation and results. */
13499 static tree
13500 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13502 tree const type = TREE_TYPE (arg0);
13503 tree result = NULL_TREE;
13505 STRIP_NOPS (arg0);
13506 STRIP_NOPS (arg1);
13508 /* To proceed, MPFR must exactly represent the target floating point
13509 format, which only happens when the target base equals two. */
13510 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13511 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13512 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13514 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13515 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13517 if (real_isfinite (ra0) && real_isfinite (ra1))
13519 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13520 const int prec = fmt->p;
13521 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13522 tree result_rem;
13523 long integer_quo;
13524 mpfr_t m0, m1;
13526 mpfr_inits2 (prec, m0, m1, NULL);
13527 mpfr_from_real (m0, ra0, GMP_RNDN);
13528 mpfr_from_real (m1, ra1, GMP_RNDN);
13529 mpfr_clear_flags ();
13530 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13531 /* Remquo is independent of the rounding mode, so pass
13532 inexact=0 to do_mpfr_ckconv(). */
13533 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13534 mpfr_clears (m0, m1, NULL);
13535 if (result_rem)
13537 /* MPFR calculates quo in the host's long so it may
13538 return more bits in quo than the target int can hold
13539 if sizeof(host long) > sizeof(target int). This can
13540 happen even for native compilers in LP64 mode. In
13541 these cases, modulo the quo value with the largest
13542 number that the target int can hold while leaving one
13543 bit for the sign. */
13544 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13545 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13547 /* Dereference the quo pointer argument. */
13548 arg_quo = build_fold_indirect_ref (arg_quo);
13549 /* Proceed iff a valid pointer type was passed in. */
13550 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13552 /* Set the value. */
13553 tree result_quo = fold_build2 (MODIFY_EXPR,
13554 TREE_TYPE (arg_quo), arg_quo,
13555 build_int_cst (NULL, integer_quo));
13556 TREE_SIDE_EFFECTS (result_quo) = 1;
13557 /* Combine the quo assignment with the rem. */
13558 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13559 result_quo, result_rem));
13564 return result;
13567 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13568 resulting value as a tree with type TYPE. The mpfr precision is
13569 set to the precision of TYPE. We assume that this mpfr function
13570 returns zero if the result could be calculated exactly within the
13571 requested precision. In addition, the integer pointer represented
13572 by ARG_SG will be dereferenced and set to the appropriate signgam
13573 (-1,1) value. */
13575 static tree
13576 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13578 tree result = NULL_TREE;
13580 STRIP_NOPS (arg);
13582 /* To proceed, MPFR must exactly represent the target floating point
13583 format, which only happens when the target base equals two. Also
13584 verify ARG is a constant and that ARG_SG is an int pointer. */
13585 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13586 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13587 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13588 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13590 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13592 /* In addition to NaN and Inf, the argument cannot be zero or a
13593 negative integer. */
13594 if (real_isfinite (ra)
13595 && ra->cl != rvc_zero
13596 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13598 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13599 const int prec = fmt->p;
13600 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13601 int inexact, sg;
13602 mpfr_t m;
13603 tree result_lg;
13605 mpfr_init2 (m, prec);
13606 mpfr_from_real (m, ra, GMP_RNDN);
13607 mpfr_clear_flags ();
13608 inexact = mpfr_lgamma (m, &sg, m, rnd);
13609 result_lg = do_mpfr_ckconv (m, type, inexact);
13610 mpfr_clear (m);
13611 if (result_lg)
13613 tree result_sg;
13615 /* Dereference the arg_sg pointer argument. */
13616 arg_sg = build_fold_indirect_ref (arg_sg);
13617 /* Assign the signgam value into *arg_sg. */
13618 result_sg = fold_build2 (MODIFY_EXPR,
13619 TREE_TYPE (arg_sg), arg_sg,
13620 build_int_cst (NULL, sg));
13621 TREE_SIDE_EFFECTS (result_sg) = 1;
13622 /* Combine the signgam assignment with the lgamma result. */
13623 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13624 result_sg, result_lg));
13629 return result;
13632 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13633 function FUNC on it and return the resulting value as a tree with
13634 type TYPE. The mpfr precision is set to the precision of TYPE. We
13635 assume that function FUNC returns zero if the result could be
13636 calculated exactly within the requested precision. */
13638 static tree
13639 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13641 tree result = NULL_TREE;
13643 STRIP_NOPS (arg);
13645 /* To proceed, MPFR must exactly represent the target floating point
13646 format, which only happens when the target base equals two. */
13647 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13648 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13649 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13651 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13652 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13654 if (real_isfinite (re) && real_isfinite (im))
13656 const struct real_format *const fmt =
13657 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13658 const int prec = fmt->p;
13659 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13660 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13661 int inexact;
13662 mpc_t m;
13664 mpc_init2 (m, prec);
13665 mpfr_from_real (mpc_realref(m), re, rnd);
13666 mpfr_from_real (mpc_imagref(m), im, rnd);
13667 mpfr_clear_flags ();
13668 inexact = func (m, m, crnd);
13669 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13670 mpc_clear (m);
13674 return result;
13677 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13678 mpc function FUNC on it and return the resulting value as a tree
13679 with type TYPE. The mpfr precision is set to the precision of
13680 TYPE. We assume that function FUNC returns zero if the result
13681 could be calculated exactly within the requested precision. If
13682 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13683 in the arguments and/or results. */
13685 tree
13686 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13687 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13689 tree result = NULL_TREE;
13691 STRIP_NOPS (arg0);
13692 STRIP_NOPS (arg1);
13694 /* To proceed, MPFR must exactly represent the target floating point
13695 format, which only happens when the target base equals two. */
13696 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13697 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13698 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13700 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13702 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13703 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13704 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13705 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13707 if (do_nonfinite
13708 || (real_isfinite (re0) && real_isfinite (im0)
13709 && real_isfinite (re1) && real_isfinite (im1)))
13711 const struct real_format *const fmt =
13712 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13713 const int prec = fmt->p;
13714 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13715 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13716 int inexact;
13717 mpc_t m0, m1;
13719 mpc_init2 (m0, prec);
13720 mpc_init2 (m1, prec);
13721 mpfr_from_real (mpc_realref(m0), re0, rnd);
13722 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13723 mpfr_from_real (mpc_realref(m1), re1, rnd);
13724 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13725 mpfr_clear_flags ();
13726 inexact = func (m0, m0, m1, crnd);
13727 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13728 mpc_clear (m0);
13729 mpc_clear (m1);
13733 return result;
13736 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13737 a normal call should be emitted rather than expanding the function
13738 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13740 static tree
13741 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13743 int nargs = gimple_call_num_args (stmt);
13745 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13746 (nargs > 0
13747 ? gimple_call_arg_ptr (stmt, 0)
13748 : &error_mark_node), fcode);
13751 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13752 a normal call should be emitted rather than expanding the function
13753 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13754 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13755 passed as second argument. */
13757 tree
13758 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13759 enum built_in_function fcode)
13761 int nargs = gimple_call_num_args (stmt);
13763 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13764 (nargs > 0
13765 ? gimple_call_arg_ptr (stmt, 0)
13766 : &error_mark_node), maxlen, fcode);
13769 /* Builtins with folding operations that operate on "..." arguments
13770 need special handling; we need to store the arguments in a convenient
13771 data structure before attempting any folding. Fortunately there are
13772 only a few builtins that fall into this category. FNDECL is the
13773 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13774 result of the function call is ignored. */
13776 static tree
13777 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13778 bool ignore ATTRIBUTE_UNUSED)
13780 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13781 tree ret = NULL_TREE;
13783 switch (fcode)
13785 case BUILT_IN_SPRINTF_CHK:
13786 case BUILT_IN_VSPRINTF_CHK:
13787 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13788 break;
13790 case BUILT_IN_SNPRINTF_CHK:
13791 case BUILT_IN_VSNPRINTF_CHK:
13792 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13794 default:
13795 break;
13797 if (ret)
13799 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13800 TREE_NO_WARNING (ret) = 1;
13801 return ret;
13803 return NULL_TREE;
13806 /* A wrapper function for builtin folding that prevents warnings for
13807 "statement without effect" and the like, caused by removing the
13808 call node earlier than the warning is generated. */
13810 tree
13811 fold_call_stmt (gimple stmt, bool ignore)
13813 tree ret = NULL_TREE;
13814 tree fndecl = gimple_call_fndecl (stmt);
13815 location_t loc = gimple_location (stmt);
13816 if (fndecl
13817 && TREE_CODE (fndecl) == FUNCTION_DECL
13818 && DECL_BUILT_IN (fndecl)
13819 && !gimple_call_va_arg_pack_p (stmt))
13821 int nargs = gimple_call_num_args (stmt);
13822 tree *args = (nargs > 0
13823 ? gimple_call_arg_ptr (stmt, 0)
13824 : &error_mark_node);
13826 if (avoid_folding_inline_builtin (fndecl))
13827 return NULL_TREE;
13828 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13830 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13832 else
13834 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13835 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13836 if (!ret)
13837 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13838 if (ret)
13840 /* Propagate location information from original call to
13841 expansion of builtin. Otherwise things like
13842 maybe_emit_chk_warning, that operate on the expansion
13843 of a builtin, will use the wrong location information. */
13844 if (gimple_has_location (stmt))
13846 tree realret = ret;
13847 if (TREE_CODE (ret) == NOP_EXPR)
13848 realret = TREE_OPERAND (ret, 0);
13849 if (CAN_HAVE_LOCATION_P (realret)
13850 && !EXPR_HAS_LOCATION (realret))
13851 SET_EXPR_LOCATION (realret, loc);
13852 return realret;
13854 return ret;
13858 return NULL_TREE;
13861 /* Look up the function in built_in_decls that corresponds to DECL
13862 and set ASMSPEC as its user assembler name. DECL must be a
13863 function decl that declares a builtin. */
13865 void
13866 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13868 tree builtin;
13869 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13870 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13871 && asmspec != 0);
13873 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13874 set_user_assembler_name (builtin, asmspec);
13875 switch (DECL_FUNCTION_CODE (decl))
13877 case BUILT_IN_MEMCPY:
13878 init_block_move_fn (asmspec);
13879 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13880 break;
13881 case BUILT_IN_MEMSET:
13882 init_block_clear_fn (asmspec);
13883 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13884 break;
13885 case BUILT_IN_MEMMOVE:
13886 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13887 break;
13888 case BUILT_IN_MEMCMP:
13889 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13890 break;
13891 case BUILT_IN_ABORT:
13892 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13893 break;
13894 case BUILT_IN_FFS:
13895 if (INT_TYPE_SIZE < BITS_PER_WORD)
13897 set_user_assembler_libfunc ("ffs", asmspec);
13898 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13899 MODE_INT, 0), "ffs");
13901 break;
13902 default:
13903 break;
13907 /* Return true if DECL is a builtin that expands to a constant or similarly
13908 simple code. */
13909 bool
13910 is_simple_builtin (tree decl)
13912 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13913 switch (DECL_FUNCTION_CODE (decl))
13915 /* Builtins that expand to constants. */
13916 case BUILT_IN_CONSTANT_P:
13917 case BUILT_IN_EXPECT:
13918 case BUILT_IN_OBJECT_SIZE:
13919 case BUILT_IN_UNREACHABLE:
13920 /* Simple register moves or loads from stack. */
13921 case BUILT_IN_RETURN_ADDRESS:
13922 case BUILT_IN_EXTRACT_RETURN_ADDR:
13923 case BUILT_IN_FROB_RETURN_ADDR:
13924 case BUILT_IN_RETURN:
13925 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13926 case BUILT_IN_FRAME_ADDRESS:
13927 case BUILT_IN_VA_END:
13928 case BUILT_IN_STACK_SAVE:
13929 case BUILT_IN_STACK_RESTORE:
13930 /* Exception state returns or moves registers around. */
13931 case BUILT_IN_EH_FILTER:
13932 case BUILT_IN_EH_POINTER:
13933 case BUILT_IN_EH_COPY_VALUES:
13934 return true;
13936 default:
13937 return false;
13940 return false;
13943 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13944 most probably expanded inline into reasonably simple code. This is a
13945 superset of is_simple_builtin. */
13946 bool
13947 is_inexpensive_builtin (tree decl)
13949 if (!decl)
13950 return false;
13951 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13952 return true;
13953 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13954 switch (DECL_FUNCTION_CODE (decl))
13956 case BUILT_IN_ABS:
13957 case BUILT_IN_ALLOCA:
13958 case BUILT_IN_BSWAP32:
13959 case BUILT_IN_BSWAP64:
13960 case BUILT_IN_CLZ:
13961 case BUILT_IN_CLZIMAX:
13962 case BUILT_IN_CLZL:
13963 case BUILT_IN_CLZLL:
13964 case BUILT_IN_CTZ:
13965 case BUILT_IN_CTZIMAX:
13966 case BUILT_IN_CTZL:
13967 case BUILT_IN_CTZLL:
13968 case BUILT_IN_FFS:
13969 case BUILT_IN_FFSIMAX:
13970 case BUILT_IN_FFSL:
13971 case BUILT_IN_FFSLL:
13972 case BUILT_IN_IMAXABS:
13973 case BUILT_IN_FINITE:
13974 case BUILT_IN_FINITEF:
13975 case BUILT_IN_FINITEL:
13976 case BUILT_IN_FINITED32:
13977 case BUILT_IN_FINITED64:
13978 case BUILT_IN_FINITED128:
13979 case BUILT_IN_FPCLASSIFY:
13980 case BUILT_IN_ISFINITE:
13981 case BUILT_IN_ISINF_SIGN:
13982 case BUILT_IN_ISINF:
13983 case BUILT_IN_ISINFF:
13984 case BUILT_IN_ISINFL:
13985 case BUILT_IN_ISINFD32:
13986 case BUILT_IN_ISINFD64:
13987 case BUILT_IN_ISINFD128:
13988 case BUILT_IN_ISNAN:
13989 case BUILT_IN_ISNANF:
13990 case BUILT_IN_ISNANL:
13991 case BUILT_IN_ISNAND32:
13992 case BUILT_IN_ISNAND64:
13993 case BUILT_IN_ISNAND128:
13994 case BUILT_IN_ISNORMAL:
13995 case BUILT_IN_ISGREATER:
13996 case BUILT_IN_ISGREATEREQUAL:
13997 case BUILT_IN_ISLESS:
13998 case BUILT_IN_ISLESSEQUAL:
13999 case BUILT_IN_ISLESSGREATER:
14000 case BUILT_IN_ISUNORDERED:
14001 case BUILT_IN_VA_ARG_PACK:
14002 case BUILT_IN_VA_ARG_PACK_LEN:
14003 case BUILT_IN_VA_COPY:
14004 case BUILT_IN_TRAP:
14005 case BUILT_IN_SAVEREGS:
14006 case BUILT_IN_POPCOUNTL:
14007 case BUILT_IN_POPCOUNTLL:
14008 case BUILT_IN_POPCOUNTIMAX:
14009 case BUILT_IN_POPCOUNT:
14010 case BUILT_IN_PARITYL:
14011 case BUILT_IN_PARITYLL:
14012 case BUILT_IN_PARITYIMAX:
14013 case BUILT_IN_PARITY:
14014 case BUILT_IN_LABS:
14015 case BUILT_IN_LLABS:
14016 case BUILT_IN_PREFETCH:
14017 return true;
14019 default:
14020 return is_simple_builtin (decl);
14023 return false;