* PR target/49903
[official-gcc.git] / gcc / builtins.c
blobe0afc908d5af93a94fe816e6c9fc2e9dce1add79
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
74 #undef DEF_BUILTIN
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
233 bool
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
237 return true;
238 if (strncmp (name, "__sync_", 7) == 0)
239 return true;
240 return false;
244 /* Return true if DECL is a function symbol representing a built-in. */
246 bool
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
257 static bool
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
267 /* Compute values M and N such that M divides (address of EXP - N) and
268 such that N < M. Store N in *BITPOSP and return M.
270 Note that the address (and thus the alignment) computed here is based
271 on the address to which a symbol resolves, whereas DECL_ALIGN is based
272 on the address at which an object is actually located. These two
273 addresses are not always the same. For example, on ARM targets,
274 the address &foo of a Thumb function foo() has the lowest bit set,
275 whereas foo() itself starts on an even address. */
277 unsigned int
278 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
280 HOST_WIDE_INT bitsize, bitpos;
281 tree offset;
282 enum machine_mode mode;
283 int unsignedp, volatilep;
284 unsigned int align, inner;
286 /* Get the innermost object and the constant (bitpos) and possibly
287 variable (offset) offset of the access. */
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
291 /* Extract alignment information from the innermost object and
292 possibly adjust bitpos and offset. */
293 if (TREE_CODE (exp) == CONST_DECL)
294 exp = DECL_INITIAL (exp);
295 if (DECL_P (exp)
296 && TREE_CODE (exp) != LABEL_DECL)
298 if (TREE_CODE (exp) == FUNCTION_DECL)
300 /* Function addresses can encode extra information besides their
301 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
302 allows the low bit to be used as a virtual bit, we know
303 that the address itself must be 2-byte aligned. */
304 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
305 align = 2 * BITS_PER_UNIT;
306 else
307 align = BITS_PER_UNIT;
309 else
310 align = DECL_ALIGN (exp);
312 else if (CONSTANT_CLASS_P (exp))
314 align = TYPE_ALIGN (TREE_TYPE (exp));
315 #ifdef CONSTANT_ALIGNMENT
316 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
317 #endif
319 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
320 align = TYPE_ALIGN (TREE_TYPE (exp));
321 else if (TREE_CODE (exp) == INDIRECT_REF)
322 align = TYPE_ALIGN (TREE_TYPE (exp));
323 else if (TREE_CODE (exp) == MEM_REF)
325 tree addr = TREE_OPERAND (exp, 0);
326 struct ptr_info_def *pi;
327 if (TREE_CODE (addr) == BIT_AND_EXPR
328 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
330 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
331 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
332 align *= BITS_PER_UNIT;
333 addr = TREE_OPERAND (addr, 0);
335 else
336 align = BITS_PER_UNIT;
337 if (TREE_CODE (addr) == SSA_NAME
338 && (pi = SSA_NAME_PTR_INFO (addr)))
340 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
341 align = MAX (pi->align * BITS_PER_UNIT, align);
343 else if (TREE_CODE (addr) == ADDR_EXPR)
344 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
345 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
347 else if (TREE_CODE (exp) == TARGET_MEM_REF)
349 struct ptr_info_def *pi;
350 tree addr = TMR_BASE (exp);
351 if (TREE_CODE (addr) == BIT_AND_EXPR
352 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
354 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
355 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
356 align *= BITS_PER_UNIT;
357 addr = TREE_OPERAND (addr, 0);
359 else
360 align = BITS_PER_UNIT;
361 if (TREE_CODE (addr) == SSA_NAME
362 && (pi = SSA_NAME_PTR_INFO (addr)))
364 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
365 align = MAX (pi->align * BITS_PER_UNIT, align);
367 else if (TREE_CODE (addr) == ADDR_EXPR)
368 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0)));
369 if (TMR_OFFSET (exp))
370 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
371 if (TMR_INDEX (exp) && TMR_STEP (exp))
373 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
374 align = MIN (align, (step & -step) * BITS_PER_UNIT);
376 else if (TMR_INDEX (exp))
377 align = BITS_PER_UNIT;
378 if (TMR_INDEX2 (exp))
379 align = BITS_PER_UNIT;
381 else
382 align = BITS_PER_UNIT;
384 /* If there is a non-constant offset part extract the maximum
385 alignment that can prevail. */
386 inner = ~0U;
387 while (offset)
389 tree next_offset;
391 if (TREE_CODE (offset) == PLUS_EXPR)
393 next_offset = TREE_OPERAND (offset, 0);
394 offset = TREE_OPERAND (offset, 1);
396 else
397 next_offset = NULL;
398 if (host_integerp (offset, 1))
400 /* Any overflow in calculating offset_bits won't change
401 the alignment. */
402 unsigned offset_bits
403 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
405 if (offset_bits)
406 inner = MIN (inner, (offset_bits & -offset_bits));
408 else if (TREE_CODE (offset) == MULT_EXPR
409 && host_integerp (TREE_OPERAND (offset, 1), 1))
411 /* Any overflow in calculating offset_factor won't change
412 the alignment. */
413 unsigned offset_factor
414 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
415 * BITS_PER_UNIT);
417 if (offset_factor)
418 inner = MIN (inner, (offset_factor & -offset_factor));
420 else
422 inner = MIN (inner, BITS_PER_UNIT);
423 break;
425 offset = next_offset;
428 /* Alignment is innermost object alignment adjusted by the constant
429 and non-constant offset parts. */
430 align = MIN (align, inner);
431 bitpos = bitpos & (align - 1);
433 *bitposp = bitpos;
434 return align;
437 /* Return the alignment in bits of EXP, an object. */
439 unsigned int
440 get_object_alignment (tree exp)
442 unsigned HOST_WIDE_INT bitpos = 0;
443 unsigned int align;
445 align = get_object_alignment_1 (exp, &bitpos);
447 /* align and bitpos now specify known low bits of the pointer.
448 ptr & (align - 1) == bitpos. */
450 if (bitpos != 0)
451 align = (bitpos & -bitpos);
453 return align;
456 /* Return the alignment in bits of EXP, a pointer valued expression.
457 The alignment returned is, by default, the alignment of the thing that
458 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
460 Otherwise, look at the expression to see if we can do better, i.e., if the
461 expression is actually pointing at an object whose alignment is tighter. */
463 unsigned int
464 get_pointer_alignment (tree exp)
466 STRIP_NOPS (exp);
468 if (TREE_CODE (exp) == ADDR_EXPR)
469 return get_object_alignment (TREE_OPERAND (exp, 0));
470 else if (TREE_CODE (exp) == SSA_NAME
471 && POINTER_TYPE_P (TREE_TYPE (exp)))
473 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
474 unsigned align;
475 if (!pi)
476 return BITS_PER_UNIT;
477 if (pi->misalign != 0)
478 align = (pi->misalign & -pi->misalign);
479 else
480 align = pi->align;
481 return align * BITS_PER_UNIT;
484 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
487 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
488 way, because it could contain a zero byte in the middle.
489 TREE_STRING_LENGTH is the size of the character array, not the string.
491 ONLY_VALUE should be nonzero if the result is not going to be emitted
492 into the instruction stream and zero if it is going to be expanded.
493 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
494 is returned, otherwise NULL, since
495 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
496 evaluate the side-effects.
498 The value returned is of type `ssizetype'.
500 Unfortunately, string_constant can't access the values of const char
501 arrays with initializers, so neither can we do so here. */
503 tree
504 c_strlen (tree src, int only_value)
506 tree offset_node;
507 HOST_WIDE_INT offset;
508 int max;
509 const char *ptr;
510 location_t loc;
512 STRIP_NOPS (src);
513 if (TREE_CODE (src) == COND_EXPR
514 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
516 tree len1, len2;
518 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
519 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
520 if (tree_int_cst_equal (len1, len2))
521 return len1;
524 if (TREE_CODE (src) == COMPOUND_EXPR
525 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
526 return c_strlen (TREE_OPERAND (src, 1), only_value);
528 loc = EXPR_LOC_OR_HERE (src);
530 src = string_constant (src, &offset_node);
531 if (src == 0)
532 return NULL_TREE;
534 max = TREE_STRING_LENGTH (src) - 1;
535 ptr = TREE_STRING_POINTER (src);
537 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
539 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
540 compute the offset to the following null if we don't know where to
541 start searching for it. */
542 int i;
544 for (i = 0; i < max; i++)
545 if (ptr[i] == 0)
546 return NULL_TREE;
548 /* We don't know the starting offset, but we do know that the string
549 has no internal zero bytes. We can assume that the offset falls
550 within the bounds of the string; otherwise, the programmer deserves
551 what he gets. Subtract the offset from the length of the string,
552 and return that. This would perhaps not be valid if we were dealing
553 with named arrays in addition to literal string constants. */
555 return size_diffop_loc (loc, size_int (max), offset_node);
558 /* We have a known offset into the string. Start searching there for
559 a null character if we can represent it as a single HOST_WIDE_INT. */
560 if (offset_node == 0)
561 offset = 0;
562 else if (! host_integerp (offset_node, 0))
563 offset = -1;
564 else
565 offset = tree_low_cst (offset_node, 0);
567 /* If the offset is known to be out of bounds, warn, and call strlen at
568 runtime. */
569 if (offset < 0 || offset > max)
571 /* Suppress multiple warnings for propagated constant strings. */
572 if (! TREE_NO_WARNING (src))
574 warning_at (loc, 0, "offset outside bounds of constant string");
575 TREE_NO_WARNING (src) = 1;
577 return NULL_TREE;
580 /* Use strlen to search for the first zero byte. Since any strings
581 constructed with build_string will have nulls appended, we win even
582 if we get handed something like (char[4])"abcd".
584 Since OFFSET is our starting index into the string, no further
585 calculation is needed. */
586 return ssize_int (strlen (ptr + offset));
589 /* Return a char pointer for a C string if it is a string constant
590 or sum of string constant and integer constant. */
592 static const char *
593 c_getstr (tree src)
595 tree offset_node;
597 src = string_constant (src, &offset_node);
598 if (src == 0)
599 return 0;
601 if (offset_node == 0)
602 return TREE_STRING_POINTER (src);
603 else if (!host_integerp (offset_node, 1)
604 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
605 return 0;
607 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
610 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
611 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
613 static rtx
614 c_readstr (const char *str, enum machine_mode mode)
616 HOST_WIDE_INT c[2];
617 HOST_WIDE_INT ch;
618 unsigned int i, j;
620 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
622 c[0] = 0;
623 c[1] = 0;
624 ch = 1;
625 for (i = 0; i < GET_MODE_SIZE (mode); i++)
627 j = i;
628 if (WORDS_BIG_ENDIAN)
629 j = GET_MODE_SIZE (mode) - i - 1;
630 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
631 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
632 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
633 j *= BITS_PER_UNIT;
634 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
636 if (ch)
637 ch = (unsigned char) str[i];
638 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
640 return immed_double_const (c[0], c[1], mode);
643 /* Cast a target constant CST to target CHAR and if that value fits into
644 host char type, return zero and put that value into variable pointed to by
645 P. */
647 static int
648 target_char_cast (tree cst, char *p)
650 unsigned HOST_WIDE_INT val, hostval;
652 if (TREE_CODE (cst) != INTEGER_CST
653 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
654 return 1;
656 val = TREE_INT_CST_LOW (cst);
657 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
658 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
660 hostval = val;
661 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
662 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
664 if (val != hostval)
665 return 1;
667 *p = hostval;
668 return 0;
671 /* Similar to save_expr, but assumes that arbitrary code is not executed
672 in between the multiple evaluations. In particular, we assume that a
673 non-addressable local variable will not be modified. */
675 static tree
676 builtin_save_expr (tree exp)
678 if (TREE_CODE (exp) == SSA_NAME
679 || (TREE_ADDRESSABLE (exp) == 0
680 && (TREE_CODE (exp) == PARM_DECL
681 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
682 return exp;
684 return save_expr (exp);
687 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
688 times to get the address of either a higher stack frame, or a return
689 address located within it (depending on FNDECL_CODE). */
691 static rtx
692 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
694 int i;
696 #ifdef INITIAL_FRAME_ADDRESS_RTX
697 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
698 #else
699 rtx tem;
701 /* For a zero count with __builtin_return_address, we don't care what
702 frame address we return, because target-specific definitions will
703 override us. Therefore frame pointer elimination is OK, and using
704 the soft frame pointer is OK.
706 For a nonzero count, or a zero count with __builtin_frame_address,
707 we require a stable offset from the current frame pointer to the
708 previous one, so we must use the hard frame pointer, and
709 we must disable frame pointer elimination. */
710 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
711 tem = frame_pointer_rtx;
712 else
714 tem = hard_frame_pointer_rtx;
716 /* Tell reload not to eliminate the frame pointer. */
717 crtl->accesses_prior_frames = 1;
719 #endif
721 /* Some machines need special handling before we can access
722 arbitrary frames. For example, on the SPARC, we must first flush
723 all register windows to the stack. */
724 #ifdef SETUP_FRAME_ADDRESSES
725 if (count > 0)
726 SETUP_FRAME_ADDRESSES ();
727 #endif
729 /* On the SPARC, the return address is not in the frame, it is in a
730 register. There is no way to access it off of the current frame
731 pointer, but it can be accessed off the previous frame pointer by
732 reading the value from the register window save area. */
733 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
734 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
735 count--;
736 #endif
738 /* Scan back COUNT frames to the specified frame. */
739 for (i = 0; i < count; i++)
741 /* Assume the dynamic chain pointer is in the word that the
742 frame address points to, unless otherwise specified. */
743 #ifdef DYNAMIC_CHAIN_ADDRESS
744 tem = DYNAMIC_CHAIN_ADDRESS (tem);
745 #endif
746 tem = memory_address (Pmode, tem);
747 tem = gen_frame_mem (Pmode, tem);
748 tem = copy_to_reg (tem);
751 /* For __builtin_frame_address, return what we've got. But, on
752 the SPARC for example, we may have to add a bias. */
753 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
754 #ifdef FRAME_ADDR_RTX
755 return FRAME_ADDR_RTX (tem);
756 #else
757 return tem;
758 #endif
760 /* For __builtin_return_address, get the return address from that frame. */
761 #ifdef RETURN_ADDR_RTX
762 tem = RETURN_ADDR_RTX (count, tem);
763 #else
764 tem = memory_address (Pmode,
765 plus_constant (tem, GET_MODE_SIZE (Pmode)));
766 tem = gen_frame_mem (Pmode, tem);
767 #endif
768 return tem;
771 /* Alias set used for setjmp buffer. */
772 static alias_set_type setjmp_alias_set = -1;
774 /* Construct the leading half of a __builtin_setjmp call. Control will
775 return to RECEIVER_LABEL. This is also called directly by the SJLJ
776 exception handling code. */
778 void
779 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
781 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
782 rtx stack_save;
783 rtx mem;
785 if (setjmp_alias_set == -1)
786 setjmp_alias_set = new_alias_set ();
788 buf_addr = convert_memory_address (Pmode, buf_addr);
790 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
792 /* We store the frame pointer and the address of receiver_label in
793 the buffer and use the rest of it for the stack save area, which
794 is machine-dependent. */
796 mem = gen_rtx_MEM (Pmode, buf_addr);
797 set_mem_alias_set (mem, setjmp_alias_set);
798 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
800 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
801 set_mem_alias_set (mem, setjmp_alias_set);
803 emit_move_insn (validize_mem (mem),
804 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
806 stack_save = gen_rtx_MEM (sa_mode,
807 plus_constant (buf_addr,
808 2 * GET_MODE_SIZE (Pmode)));
809 set_mem_alias_set (stack_save, setjmp_alias_set);
810 emit_stack_save (SAVE_NONLOCAL, &stack_save);
812 /* If there is further processing to do, do it. */
813 #ifdef HAVE_builtin_setjmp_setup
814 if (HAVE_builtin_setjmp_setup)
815 emit_insn (gen_builtin_setjmp_setup (buf_addr));
816 #endif
818 /* We have a nonlocal label. */
819 cfun->has_nonlocal_label = 1;
822 /* Construct the trailing part of a __builtin_setjmp call. This is
823 also called directly by the SJLJ exception handling code. */
825 void
826 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
828 rtx chain;
830 /* Clobber the FP when we get here, so we have to make sure it's
831 marked as used by this function. */
832 emit_use (hard_frame_pointer_rtx);
834 /* Mark the static chain as clobbered here so life information
835 doesn't get messed up for it. */
836 chain = targetm.calls.static_chain (current_function_decl, true);
837 if (chain && REG_P (chain))
838 emit_clobber (chain);
840 /* Now put in the code to restore the frame pointer, and argument
841 pointer, if needed. */
842 #ifdef HAVE_nonlocal_goto
843 if (! HAVE_nonlocal_goto)
844 #endif
846 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
847 /* This might change the hard frame pointer in ways that aren't
848 apparent to early optimization passes, so force a clobber. */
849 emit_clobber (hard_frame_pointer_rtx);
852 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
853 if (fixed_regs[ARG_POINTER_REGNUM])
855 #ifdef ELIMINABLE_REGS
856 size_t i;
857 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
859 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
860 if (elim_regs[i].from == ARG_POINTER_REGNUM
861 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
862 break;
864 if (i == ARRAY_SIZE (elim_regs))
865 #endif
867 /* Now restore our arg pointer from the address at which it
868 was saved in our stack frame. */
869 emit_move_insn (crtl->args.internal_arg_pointer,
870 copy_to_reg (get_arg_pointer_save_area ()));
873 #endif
875 #ifdef HAVE_builtin_setjmp_receiver
876 if (HAVE_builtin_setjmp_receiver)
877 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
878 else
879 #endif
880 #ifdef HAVE_nonlocal_goto_receiver
881 if (HAVE_nonlocal_goto_receiver)
882 emit_insn (gen_nonlocal_goto_receiver ());
883 else
884 #endif
885 { /* Nothing */ }
887 /* We must not allow the code we just generated to be reordered by
888 scheduling. Specifically, the update of the frame pointer must
889 happen immediately, not later. */
890 emit_insn (gen_blockage ());
893 /* __builtin_longjmp is passed a pointer to an array of five words (not
894 all will be used on all machines). It operates similarly to the C
895 library function of the same name, but is more efficient. Much of
896 the code below is copied from the handling of non-local gotos. */
898 static void
899 expand_builtin_longjmp (rtx buf_addr, rtx value)
901 rtx fp, lab, stack, insn, last;
902 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
904 /* DRAP is needed for stack realign if longjmp is expanded to current
905 function */
906 if (SUPPORTS_STACK_ALIGNMENT)
907 crtl->need_drap = true;
909 if (setjmp_alias_set == -1)
910 setjmp_alias_set = new_alias_set ();
912 buf_addr = convert_memory_address (Pmode, buf_addr);
914 buf_addr = force_reg (Pmode, buf_addr);
916 /* We require that the user must pass a second argument of 1, because
917 that is what builtin_setjmp will return. */
918 gcc_assert (value == const1_rtx);
920 last = get_last_insn ();
921 #ifdef HAVE_builtin_longjmp
922 if (HAVE_builtin_longjmp)
923 emit_insn (gen_builtin_longjmp (buf_addr));
924 else
925 #endif
927 fp = gen_rtx_MEM (Pmode, buf_addr);
928 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
929 GET_MODE_SIZE (Pmode)));
931 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
932 2 * GET_MODE_SIZE (Pmode)));
933 set_mem_alias_set (fp, setjmp_alias_set);
934 set_mem_alias_set (lab, setjmp_alias_set);
935 set_mem_alias_set (stack, setjmp_alias_set);
937 /* Pick up FP, label, and SP from the block and jump. This code is
938 from expand_goto in stmt.c; see there for detailed comments. */
939 #ifdef HAVE_nonlocal_goto
940 if (HAVE_nonlocal_goto)
941 /* We have to pass a value to the nonlocal_goto pattern that will
942 get copied into the static_chain pointer, but it does not matter
943 what that value is, because builtin_setjmp does not use it. */
944 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
945 else
946 #endif
948 lab = copy_to_reg (lab);
950 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
951 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
953 emit_move_insn (hard_frame_pointer_rtx, fp);
954 emit_stack_restore (SAVE_NONLOCAL, stack);
956 emit_use (hard_frame_pointer_rtx);
957 emit_use (stack_pointer_rtx);
958 emit_indirect_jump (lab);
962 /* Search backwards and mark the jump insn as a non-local goto.
963 Note that this precludes the use of __builtin_longjmp to a
964 __builtin_setjmp target in the same function. However, we've
965 already cautioned the user that these functions are for
966 internal exception handling use only. */
967 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
969 gcc_assert (insn != last);
971 if (JUMP_P (insn))
973 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
974 break;
976 else if (CALL_P (insn))
977 break;
981 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
982 and the address of the save area. */
984 static rtx
985 expand_builtin_nonlocal_goto (tree exp)
987 tree t_label, t_save_area;
988 rtx r_label, r_save_area, r_fp, r_sp, insn;
990 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
991 return NULL_RTX;
993 t_label = CALL_EXPR_ARG (exp, 0);
994 t_save_area = CALL_EXPR_ARG (exp, 1);
996 r_label = expand_normal (t_label);
997 r_label = convert_memory_address (Pmode, r_label);
998 r_save_area = expand_normal (t_save_area);
999 r_save_area = convert_memory_address (Pmode, r_save_area);
1000 /* Copy the address of the save location to a register just in case it was
1001 based on the frame pointer. */
1002 r_save_area = copy_to_reg (r_save_area);
1003 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1004 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1005 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1007 crtl->has_nonlocal_goto = 1;
1009 #ifdef HAVE_nonlocal_goto
1010 /* ??? We no longer need to pass the static chain value, afaik. */
1011 if (HAVE_nonlocal_goto)
1012 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1013 else
1014 #endif
1016 r_label = copy_to_reg (r_label);
1018 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1019 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1021 /* Restore frame pointer for containing function. */
1022 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1023 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1025 /* USE of hard_frame_pointer_rtx added for consistency;
1026 not clear if really needed. */
1027 emit_use (hard_frame_pointer_rtx);
1028 emit_use (stack_pointer_rtx);
1030 /* If the architecture is using a GP register, we must
1031 conservatively assume that the target function makes use of it.
1032 The prologue of functions with nonlocal gotos must therefore
1033 initialize the GP register to the appropriate value, and we
1034 must then make sure that this value is live at the point
1035 of the jump. (Note that this doesn't necessarily apply
1036 to targets with a nonlocal_goto pattern; they are free
1037 to implement it in their own way. Note also that this is
1038 a no-op if the GP register is a global invariant.) */
1039 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1040 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1041 emit_use (pic_offset_table_rtx);
1043 emit_indirect_jump (r_label);
1046 /* Search backwards to the jump insn and mark it as a
1047 non-local goto. */
1048 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1050 if (JUMP_P (insn))
1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1053 break;
1055 else if (CALL_P (insn))
1056 break;
1059 return const0_rtx;
1062 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1063 (not all will be used on all machines) that was passed to __builtin_setjmp.
1064 It updates the stack pointer in that block to correspond to the current
1065 stack pointer. */
1067 static void
1068 expand_builtin_update_setjmp_buf (rtx buf_addr)
1070 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1071 rtx stack_save
1072 = gen_rtx_MEM (sa_mode,
1073 memory_address
1074 (sa_mode,
1075 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1077 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1080 /* Expand a call to __builtin_prefetch. For a target that does not support
1081 data prefetch, evaluate the memory address argument in case it has side
1082 effects. */
1084 static void
1085 expand_builtin_prefetch (tree exp)
1087 tree arg0, arg1, arg2;
1088 int nargs;
1089 rtx op0, op1, op2;
1091 if (!validate_arglist (exp, POINTER_TYPE, 0))
1092 return;
1094 arg0 = CALL_EXPR_ARG (exp, 0);
1096 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1097 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1098 locality). */
1099 nargs = call_expr_nargs (exp);
1100 if (nargs > 1)
1101 arg1 = CALL_EXPR_ARG (exp, 1);
1102 else
1103 arg1 = integer_zero_node;
1104 if (nargs > 2)
1105 arg2 = CALL_EXPR_ARG (exp, 2);
1106 else
1107 arg2 = integer_three_node;
1109 /* Argument 0 is an address. */
1110 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1112 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1113 if (TREE_CODE (arg1) != INTEGER_CST)
1115 error ("second argument to %<__builtin_prefetch%> must be a constant");
1116 arg1 = integer_zero_node;
1118 op1 = expand_normal (arg1);
1119 /* Argument 1 must be either zero or one. */
1120 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1122 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1123 " using zero");
1124 op1 = const0_rtx;
1127 /* Argument 2 (locality) must be a compile-time constant int. */
1128 if (TREE_CODE (arg2) != INTEGER_CST)
1130 error ("third argument to %<__builtin_prefetch%> must be a constant");
1131 arg2 = integer_zero_node;
1133 op2 = expand_normal (arg2);
1134 /* Argument 2 must be 0, 1, 2, or 3. */
1135 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1137 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1138 op2 = const0_rtx;
1141 #ifdef HAVE_prefetch
1142 if (HAVE_prefetch)
1144 struct expand_operand ops[3];
1146 create_address_operand (&ops[0], op0);
1147 create_integer_operand (&ops[1], INTVAL (op1));
1148 create_integer_operand (&ops[2], INTVAL (op2));
1149 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1150 return;
1152 #endif
1154 /* Don't do anything with direct references to volatile memory, but
1155 generate code to handle other side effects. */
1156 if (!MEM_P (op0) && side_effects_p (op0))
1157 emit_insn (op0);
1160 /* Get a MEM rtx for expression EXP which is the address of an operand
1161 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1162 the maximum length of the block of memory that might be accessed or
1163 NULL if unknown. */
1165 static rtx
1166 get_memory_rtx (tree exp, tree len)
1168 tree orig_exp = exp;
1169 rtx addr, mem;
1170 HOST_WIDE_INT off;
1172 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1173 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1174 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1175 exp = TREE_OPERAND (exp, 0);
1177 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1178 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1180 /* Get an expression we can use to find the attributes to assign to MEM.
1181 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1182 we can. First remove any nops. */
1183 while (CONVERT_EXPR_P (exp)
1184 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1185 exp = TREE_OPERAND (exp, 0);
1187 off = 0;
1188 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1189 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1190 && host_integerp (TREE_OPERAND (exp, 1), 0)
1191 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1192 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1193 else if (TREE_CODE (exp) == ADDR_EXPR)
1194 exp = TREE_OPERAND (exp, 0);
1195 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1196 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1197 else
1198 exp = NULL;
1200 /* Honor attributes derived from exp, except for the alias set
1201 (as builtin stringops may alias with anything) and the size
1202 (as stringops may access multiple array elements). */
1203 if (exp)
1205 set_mem_attributes (mem, exp, 0);
1207 if (off)
1208 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1210 /* Allow the string and memory builtins to overflow from one
1211 field into another, see http://gcc.gnu.org/PR23561.
1212 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1213 memory accessed by the string or memory builtin will fit
1214 within the field. */
1215 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1217 tree mem_expr = MEM_EXPR (mem);
1218 HOST_WIDE_INT offset = -1, length = -1;
1219 tree inner = exp;
1221 while (TREE_CODE (inner) == ARRAY_REF
1222 || CONVERT_EXPR_P (inner)
1223 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1224 || TREE_CODE (inner) == SAVE_EXPR)
1225 inner = TREE_OPERAND (inner, 0);
1227 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1229 if (MEM_OFFSET_KNOWN_P (mem))
1230 offset = MEM_OFFSET (mem);
1232 if (offset >= 0 && len && host_integerp (len, 0))
1233 length = tree_low_cst (len, 0);
1235 while (TREE_CODE (inner) == COMPONENT_REF)
1237 tree field = TREE_OPERAND (inner, 1);
1238 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1239 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1241 /* Bitfields are generally not byte-addressable. */
1242 gcc_assert (!DECL_BIT_FIELD (field)
1243 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1244 % BITS_PER_UNIT) == 0
1245 && host_integerp (DECL_SIZE (field), 0)
1246 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1247 % BITS_PER_UNIT) == 0));
1249 /* If we can prove that the memory starting at XEXP (mem, 0) and
1250 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1251 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1252 fields without DECL_SIZE_UNIT like flexible array members. */
1253 if (length >= 0
1254 && DECL_SIZE_UNIT (field)
1255 && host_integerp (DECL_SIZE_UNIT (field), 0))
1257 HOST_WIDE_INT size
1258 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1259 if (offset <= size
1260 && length <= size
1261 && offset + length <= size)
1262 break;
1265 if (offset >= 0
1266 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1267 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1268 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1269 / BITS_PER_UNIT;
1270 else
1272 offset = -1;
1273 length = -1;
1276 mem_expr = TREE_OPERAND (mem_expr, 0);
1277 inner = TREE_OPERAND (inner, 0);
1280 if (mem_expr == NULL)
1281 offset = -1;
1282 if (mem_expr != MEM_EXPR (mem))
1284 set_mem_expr (mem, mem_expr);
1285 if (offset >= 0)
1286 set_mem_offset (mem, offset);
1287 else
1288 clear_mem_offset (mem);
1291 set_mem_alias_set (mem, 0);
1292 clear_mem_size (mem);
1295 return mem;
1298 /* Built-in functions to perform an untyped call and return. */
1300 #define apply_args_mode \
1301 (this_target_builtins->x_apply_args_mode)
1302 #define apply_result_mode \
1303 (this_target_builtins->x_apply_result_mode)
1305 /* Return the size required for the block returned by __builtin_apply_args,
1306 and initialize apply_args_mode. */
1308 static int
1309 apply_args_size (void)
1311 static int size = -1;
1312 int align;
1313 unsigned int regno;
1314 enum machine_mode mode;
1316 /* The values computed by this function never change. */
1317 if (size < 0)
1319 /* The first value is the incoming arg-pointer. */
1320 size = GET_MODE_SIZE (Pmode);
1322 /* The second value is the structure value address unless this is
1323 passed as an "invisible" first argument. */
1324 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1325 size += GET_MODE_SIZE (Pmode);
1327 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1328 if (FUNCTION_ARG_REGNO_P (regno))
1330 mode = targetm.calls.get_raw_arg_mode (regno);
1332 gcc_assert (mode != VOIDmode);
1334 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1335 if (size % align != 0)
1336 size = CEIL (size, align) * align;
1337 size += GET_MODE_SIZE (mode);
1338 apply_args_mode[regno] = mode;
1340 else
1342 apply_args_mode[regno] = VOIDmode;
1345 return size;
1348 /* Return the size required for the block returned by __builtin_apply,
1349 and initialize apply_result_mode. */
1351 static int
1352 apply_result_size (void)
1354 static int size = -1;
1355 int align, regno;
1356 enum machine_mode mode;
1358 /* The values computed by this function never change. */
1359 if (size < 0)
1361 size = 0;
1363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1364 if (targetm.calls.function_value_regno_p (regno))
1366 mode = targetm.calls.get_raw_result_mode (regno);
1368 gcc_assert (mode != VOIDmode);
1370 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1371 if (size % align != 0)
1372 size = CEIL (size, align) * align;
1373 size += GET_MODE_SIZE (mode);
1374 apply_result_mode[regno] = mode;
1376 else
1377 apply_result_mode[regno] = VOIDmode;
1379 /* Allow targets that use untyped_call and untyped_return to override
1380 the size so that machine-specific information can be stored here. */
1381 #ifdef APPLY_RESULT_SIZE
1382 size = APPLY_RESULT_SIZE;
1383 #endif
1385 return size;
1388 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1389 /* Create a vector describing the result block RESULT. If SAVEP is true,
1390 the result block is used to save the values; otherwise it is used to
1391 restore the values. */
1393 static rtx
1394 result_vector (int savep, rtx result)
1396 int regno, size, align, nelts;
1397 enum machine_mode mode;
1398 rtx reg, mem;
1399 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1401 size = nelts = 0;
1402 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1403 if ((mode = apply_result_mode[regno]) != VOIDmode)
1405 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1406 if (size % align != 0)
1407 size = CEIL (size, align) * align;
1408 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1409 mem = adjust_address (result, mode, size);
1410 savevec[nelts++] = (savep
1411 ? gen_rtx_SET (VOIDmode, mem, reg)
1412 : gen_rtx_SET (VOIDmode, reg, mem));
1413 size += GET_MODE_SIZE (mode);
1415 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1417 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1419 /* Save the state required to perform an untyped call with the same
1420 arguments as were passed to the current function. */
1422 static rtx
1423 expand_builtin_apply_args_1 (void)
1425 rtx registers, tem;
1426 int size, align, regno;
1427 enum machine_mode mode;
1428 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1430 /* Create a block where the arg-pointer, structure value address,
1431 and argument registers can be saved. */
1432 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1434 /* Walk past the arg-pointer and structure value address. */
1435 size = GET_MODE_SIZE (Pmode);
1436 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1437 size += GET_MODE_SIZE (Pmode);
1439 /* Save each register used in calling a function to the block. */
1440 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1441 if ((mode = apply_args_mode[regno]) != VOIDmode)
1443 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1444 if (size % align != 0)
1445 size = CEIL (size, align) * align;
1447 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1449 emit_move_insn (adjust_address (registers, mode, size), tem);
1450 size += GET_MODE_SIZE (mode);
1453 /* Save the arg pointer to the block. */
1454 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1455 #ifdef STACK_GROWS_DOWNWARD
1456 /* We need the pointer as the caller actually passed them to us, not
1457 as we might have pretended they were passed. Make sure it's a valid
1458 operand, as emit_move_insn isn't expected to handle a PLUS. */
1460 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1461 NULL_RTX);
1462 #endif
1463 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1465 size = GET_MODE_SIZE (Pmode);
1467 /* Save the structure value address unless this is passed as an
1468 "invisible" first argument. */
1469 if (struct_incoming_value)
1471 emit_move_insn (adjust_address (registers, Pmode, size),
1472 copy_to_reg (struct_incoming_value));
1473 size += GET_MODE_SIZE (Pmode);
1476 /* Return the address of the block. */
1477 return copy_addr_to_reg (XEXP (registers, 0));
1480 /* __builtin_apply_args returns block of memory allocated on
1481 the stack into which is stored the arg pointer, structure
1482 value address, static chain, and all the registers that might
1483 possibly be used in performing a function call. The code is
1484 moved to the start of the function so the incoming values are
1485 saved. */
1487 static rtx
1488 expand_builtin_apply_args (void)
1490 /* Don't do __builtin_apply_args more than once in a function.
1491 Save the result of the first call and reuse it. */
1492 if (apply_args_value != 0)
1493 return apply_args_value;
1495 /* When this function is called, it means that registers must be
1496 saved on entry to this function. So we migrate the
1497 call to the first insn of this function. */
1498 rtx temp;
1499 rtx seq;
1501 start_sequence ();
1502 temp = expand_builtin_apply_args_1 ();
1503 seq = get_insns ();
1504 end_sequence ();
1506 apply_args_value = temp;
1508 /* Put the insns after the NOTE that starts the function.
1509 If this is inside a start_sequence, make the outer-level insn
1510 chain current, so the code is placed at the start of the
1511 function. If internal_arg_pointer is a non-virtual pseudo,
1512 it needs to be placed after the function that initializes
1513 that pseudo. */
1514 push_topmost_sequence ();
1515 if (REG_P (crtl->args.internal_arg_pointer)
1516 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1517 emit_insn_before (seq, parm_birth_insn);
1518 else
1519 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1520 pop_topmost_sequence ();
1521 return temp;
1525 /* Perform an untyped call and save the state required to perform an
1526 untyped return of whatever value was returned by the given function. */
1528 static rtx
1529 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1531 int size, align, regno;
1532 enum machine_mode mode;
1533 rtx incoming_args, result, reg, dest, src, call_insn;
1534 rtx old_stack_level = 0;
1535 rtx call_fusage = 0;
1536 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1538 arguments = convert_memory_address (Pmode, arguments);
1540 /* Create a block where the return registers can be saved. */
1541 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1543 /* Fetch the arg pointer from the ARGUMENTS block. */
1544 incoming_args = gen_reg_rtx (Pmode);
1545 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1546 #ifndef STACK_GROWS_DOWNWARD
1547 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1548 incoming_args, 0, OPTAB_LIB_WIDEN);
1549 #endif
1551 /* Push a new argument block and copy the arguments. Do not allow
1552 the (potential) memcpy call below to interfere with our stack
1553 manipulations. */
1554 do_pending_stack_adjust ();
1555 NO_DEFER_POP;
1557 /* Save the stack with nonlocal if available. */
1558 #ifdef HAVE_save_stack_nonlocal
1559 if (HAVE_save_stack_nonlocal)
1560 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1561 else
1562 #endif
1563 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1565 /* Allocate a block of memory onto the stack and copy the memory
1566 arguments to the outgoing arguments address. We can pass TRUE
1567 as the 4th argument because we just saved the stack pointer
1568 and will restore it right after the call. */
1569 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1571 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1572 may have already set current_function_calls_alloca to true.
1573 current_function_calls_alloca won't be set if argsize is zero,
1574 so we have to guarantee need_drap is true here. */
1575 if (SUPPORTS_STACK_ALIGNMENT)
1576 crtl->need_drap = true;
1578 dest = virtual_outgoing_args_rtx;
1579 #ifndef STACK_GROWS_DOWNWARD
1580 if (CONST_INT_P (argsize))
1581 dest = plus_constant (dest, -INTVAL (argsize));
1582 else
1583 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1584 #endif
1585 dest = gen_rtx_MEM (BLKmode, dest);
1586 set_mem_align (dest, PARM_BOUNDARY);
1587 src = gen_rtx_MEM (BLKmode, incoming_args);
1588 set_mem_align (src, PARM_BOUNDARY);
1589 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1591 /* Refer to the argument block. */
1592 apply_args_size ();
1593 arguments = gen_rtx_MEM (BLKmode, arguments);
1594 set_mem_align (arguments, PARM_BOUNDARY);
1596 /* Walk past the arg-pointer and structure value address. */
1597 size = GET_MODE_SIZE (Pmode);
1598 if (struct_value)
1599 size += GET_MODE_SIZE (Pmode);
1601 /* Restore each of the registers previously saved. Make USE insns
1602 for each of these registers for use in making the call. */
1603 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1604 if ((mode = apply_args_mode[regno]) != VOIDmode)
1606 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1607 if (size % align != 0)
1608 size = CEIL (size, align) * align;
1609 reg = gen_rtx_REG (mode, regno);
1610 emit_move_insn (reg, adjust_address (arguments, mode, size));
1611 use_reg (&call_fusage, reg);
1612 size += GET_MODE_SIZE (mode);
1615 /* Restore the structure value address unless this is passed as an
1616 "invisible" first argument. */
1617 size = GET_MODE_SIZE (Pmode);
1618 if (struct_value)
1620 rtx value = gen_reg_rtx (Pmode);
1621 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1622 emit_move_insn (struct_value, value);
1623 if (REG_P (struct_value))
1624 use_reg (&call_fusage, struct_value);
1625 size += GET_MODE_SIZE (Pmode);
1628 /* All arguments and registers used for the call are set up by now! */
1629 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1631 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1632 and we don't want to load it into a register as an optimization,
1633 because prepare_call_address already did it if it should be done. */
1634 if (GET_CODE (function) != SYMBOL_REF)
1635 function = memory_address (FUNCTION_MODE, function);
1637 /* Generate the actual call instruction and save the return value. */
1638 #ifdef HAVE_untyped_call
1639 if (HAVE_untyped_call)
1640 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1641 result, result_vector (1, result)));
1642 else
1643 #endif
1644 #ifdef HAVE_call_value
1645 if (HAVE_call_value)
1647 rtx valreg = 0;
1649 /* Locate the unique return register. It is not possible to
1650 express a call that sets more than one return register using
1651 call_value; use untyped_call for that. In fact, untyped_call
1652 only needs to save the return registers in the given block. */
1653 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1654 if ((mode = apply_result_mode[regno]) != VOIDmode)
1656 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1658 valreg = gen_rtx_REG (mode, regno);
1661 emit_call_insn (GEN_CALL_VALUE (valreg,
1662 gen_rtx_MEM (FUNCTION_MODE, function),
1663 const0_rtx, NULL_RTX, const0_rtx));
1665 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1667 else
1668 #endif
1669 gcc_unreachable ();
1671 /* Find the CALL insn we just emitted, and attach the register usage
1672 information. */
1673 call_insn = last_call_insn ();
1674 add_function_usage_to (call_insn, call_fusage);
1676 /* Restore the stack. */
1677 #ifdef HAVE_save_stack_nonlocal
1678 if (HAVE_save_stack_nonlocal)
1679 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1680 else
1681 #endif
1682 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1684 OK_DEFER_POP;
1686 /* Return the address of the result block. */
1687 result = copy_addr_to_reg (XEXP (result, 0));
1688 return convert_memory_address (ptr_mode, result);
1691 /* Perform an untyped return. */
1693 static void
1694 expand_builtin_return (rtx result)
1696 int size, align, regno;
1697 enum machine_mode mode;
1698 rtx reg;
1699 rtx call_fusage = 0;
1701 result = convert_memory_address (Pmode, result);
1703 apply_result_size ();
1704 result = gen_rtx_MEM (BLKmode, result);
1706 #ifdef HAVE_untyped_return
1707 if (HAVE_untyped_return)
1709 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1710 emit_barrier ();
1711 return;
1713 #endif
1715 /* Restore the return value and note that each value is used. */
1716 size = 0;
1717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1718 if ((mode = apply_result_mode[regno]) != VOIDmode)
1720 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1721 if (size % align != 0)
1722 size = CEIL (size, align) * align;
1723 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1724 emit_move_insn (reg, adjust_address (result, mode, size));
1726 push_to_sequence (call_fusage);
1727 emit_use (reg);
1728 call_fusage = get_insns ();
1729 end_sequence ();
1730 size += GET_MODE_SIZE (mode);
1733 /* Put the USE insns before the return. */
1734 emit_insn (call_fusage);
1736 /* Return whatever values was restored by jumping directly to the end
1737 of the function. */
1738 expand_naked_return ();
1741 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1743 static enum type_class
1744 type_to_class (tree type)
1746 switch (TREE_CODE (type))
1748 case VOID_TYPE: return void_type_class;
1749 case INTEGER_TYPE: return integer_type_class;
1750 case ENUMERAL_TYPE: return enumeral_type_class;
1751 case BOOLEAN_TYPE: return boolean_type_class;
1752 case POINTER_TYPE: return pointer_type_class;
1753 case REFERENCE_TYPE: return reference_type_class;
1754 case OFFSET_TYPE: return offset_type_class;
1755 case REAL_TYPE: return real_type_class;
1756 case COMPLEX_TYPE: return complex_type_class;
1757 case FUNCTION_TYPE: return function_type_class;
1758 case METHOD_TYPE: return method_type_class;
1759 case RECORD_TYPE: return record_type_class;
1760 case UNION_TYPE:
1761 case QUAL_UNION_TYPE: return union_type_class;
1762 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1763 ? string_type_class : array_type_class);
1764 case LANG_TYPE: return lang_type_class;
1765 default: return no_type_class;
1769 /* Expand a call EXP to __builtin_classify_type. */
1771 static rtx
1772 expand_builtin_classify_type (tree exp)
1774 if (call_expr_nargs (exp))
1775 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1776 return GEN_INT (no_type_class);
1779 /* This helper macro, meant to be used in mathfn_built_in below,
1780 determines which among a set of three builtin math functions is
1781 appropriate for a given type mode. The `F' and `L' cases are
1782 automatically generated from the `double' case. */
1783 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1784 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1785 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1786 fcodel = BUILT_IN_MATHFN##L ; break;
1787 /* Similar to above, but appends _R after any F/L suffix. */
1788 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1789 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1790 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1791 fcodel = BUILT_IN_MATHFN##L_R ; break;
1793 /* Return mathematic function equivalent to FN but operating directly
1794 on TYPE, if available. If IMPLICIT is true find the function in
1795 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1796 can't do the conversion, return zero. */
1798 static tree
1799 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1801 tree const *const fn_arr
1802 = implicit ? implicit_built_in_decls : built_in_decls;
1803 enum built_in_function fcode, fcodef, fcodel;
1805 switch (fn)
1807 CASE_MATHFN (BUILT_IN_ACOS)
1808 CASE_MATHFN (BUILT_IN_ACOSH)
1809 CASE_MATHFN (BUILT_IN_ASIN)
1810 CASE_MATHFN (BUILT_IN_ASINH)
1811 CASE_MATHFN (BUILT_IN_ATAN)
1812 CASE_MATHFN (BUILT_IN_ATAN2)
1813 CASE_MATHFN (BUILT_IN_ATANH)
1814 CASE_MATHFN (BUILT_IN_CBRT)
1815 CASE_MATHFN (BUILT_IN_CEIL)
1816 CASE_MATHFN (BUILT_IN_CEXPI)
1817 CASE_MATHFN (BUILT_IN_COPYSIGN)
1818 CASE_MATHFN (BUILT_IN_COS)
1819 CASE_MATHFN (BUILT_IN_COSH)
1820 CASE_MATHFN (BUILT_IN_DREM)
1821 CASE_MATHFN (BUILT_IN_ERF)
1822 CASE_MATHFN (BUILT_IN_ERFC)
1823 CASE_MATHFN (BUILT_IN_EXP)
1824 CASE_MATHFN (BUILT_IN_EXP10)
1825 CASE_MATHFN (BUILT_IN_EXP2)
1826 CASE_MATHFN (BUILT_IN_EXPM1)
1827 CASE_MATHFN (BUILT_IN_FABS)
1828 CASE_MATHFN (BUILT_IN_FDIM)
1829 CASE_MATHFN (BUILT_IN_FLOOR)
1830 CASE_MATHFN (BUILT_IN_FMA)
1831 CASE_MATHFN (BUILT_IN_FMAX)
1832 CASE_MATHFN (BUILT_IN_FMIN)
1833 CASE_MATHFN (BUILT_IN_FMOD)
1834 CASE_MATHFN (BUILT_IN_FREXP)
1835 CASE_MATHFN (BUILT_IN_GAMMA)
1836 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1837 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1838 CASE_MATHFN (BUILT_IN_HYPOT)
1839 CASE_MATHFN (BUILT_IN_ILOGB)
1840 CASE_MATHFN (BUILT_IN_ICEIL)
1841 CASE_MATHFN (BUILT_IN_IFLOOR)
1842 CASE_MATHFN (BUILT_IN_INF)
1843 CASE_MATHFN (BUILT_IN_IRINT)
1844 CASE_MATHFN (BUILT_IN_IROUND)
1845 CASE_MATHFN (BUILT_IN_ISINF)
1846 CASE_MATHFN (BUILT_IN_J0)
1847 CASE_MATHFN (BUILT_IN_J1)
1848 CASE_MATHFN (BUILT_IN_JN)
1849 CASE_MATHFN (BUILT_IN_LCEIL)
1850 CASE_MATHFN (BUILT_IN_LDEXP)
1851 CASE_MATHFN (BUILT_IN_LFLOOR)
1852 CASE_MATHFN (BUILT_IN_LGAMMA)
1853 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1854 CASE_MATHFN (BUILT_IN_LLCEIL)
1855 CASE_MATHFN (BUILT_IN_LLFLOOR)
1856 CASE_MATHFN (BUILT_IN_LLRINT)
1857 CASE_MATHFN (BUILT_IN_LLROUND)
1858 CASE_MATHFN (BUILT_IN_LOG)
1859 CASE_MATHFN (BUILT_IN_LOG10)
1860 CASE_MATHFN (BUILT_IN_LOG1P)
1861 CASE_MATHFN (BUILT_IN_LOG2)
1862 CASE_MATHFN (BUILT_IN_LOGB)
1863 CASE_MATHFN (BUILT_IN_LRINT)
1864 CASE_MATHFN (BUILT_IN_LROUND)
1865 CASE_MATHFN (BUILT_IN_MODF)
1866 CASE_MATHFN (BUILT_IN_NAN)
1867 CASE_MATHFN (BUILT_IN_NANS)
1868 CASE_MATHFN (BUILT_IN_NEARBYINT)
1869 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1870 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1871 CASE_MATHFN (BUILT_IN_POW)
1872 CASE_MATHFN (BUILT_IN_POWI)
1873 CASE_MATHFN (BUILT_IN_POW10)
1874 CASE_MATHFN (BUILT_IN_REMAINDER)
1875 CASE_MATHFN (BUILT_IN_REMQUO)
1876 CASE_MATHFN (BUILT_IN_RINT)
1877 CASE_MATHFN (BUILT_IN_ROUND)
1878 CASE_MATHFN (BUILT_IN_SCALB)
1879 CASE_MATHFN (BUILT_IN_SCALBLN)
1880 CASE_MATHFN (BUILT_IN_SCALBN)
1881 CASE_MATHFN (BUILT_IN_SIGNBIT)
1882 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1883 CASE_MATHFN (BUILT_IN_SIN)
1884 CASE_MATHFN (BUILT_IN_SINCOS)
1885 CASE_MATHFN (BUILT_IN_SINH)
1886 CASE_MATHFN (BUILT_IN_SQRT)
1887 CASE_MATHFN (BUILT_IN_TAN)
1888 CASE_MATHFN (BUILT_IN_TANH)
1889 CASE_MATHFN (BUILT_IN_TGAMMA)
1890 CASE_MATHFN (BUILT_IN_TRUNC)
1891 CASE_MATHFN (BUILT_IN_Y0)
1892 CASE_MATHFN (BUILT_IN_Y1)
1893 CASE_MATHFN (BUILT_IN_YN)
1895 default:
1896 return NULL_TREE;
1899 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1900 return fn_arr[fcode];
1901 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1902 return fn_arr[fcodef];
1903 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1904 return fn_arr[fcodel];
1905 else
1906 return NULL_TREE;
1909 /* Like mathfn_built_in_1(), but always use the implicit array. */
1911 tree
1912 mathfn_built_in (tree type, enum built_in_function fn)
1914 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1917 /* If errno must be maintained, expand the RTL to check if the result,
1918 TARGET, of a built-in function call, EXP, is NaN, and if so set
1919 errno to EDOM. */
1921 static void
1922 expand_errno_check (tree exp, rtx target)
1924 rtx lab = gen_label_rtx ();
1926 /* Test the result; if it is NaN, set errno=EDOM because
1927 the argument was not in the domain. */
1928 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1929 NULL_RTX, NULL_RTX, lab,
1930 /* The jump is very likely. */
1931 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1933 #ifdef TARGET_EDOM
1934 /* If this built-in doesn't throw an exception, set errno directly. */
1935 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1937 #ifdef GEN_ERRNO_RTX
1938 rtx errno_rtx = GEN_ERRNO_RTX;
1939 #else
1940 rtx errno_rtx
1941 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1942 #endif
1943 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1944 emit_label (lab);
1945 return;
1947 #endif
1949 /* Make sure the library call isn't expanded as a tail call. */
1950 CALL_EXPR_TAILCALL (exp) = 0;
1952 /* We can't set errno=EDOM directly; let the library call do it.
1953 Pop the arguments right away in case the call gets deleted. */
1954 NO_DEFER_POP;
1955 expand_call (exp, target, 0);
1956 OK_DEFER_POP;
1957 emit_label (lab);
1960 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1961 Return NULL_RTX if a normal call should be emitted rather than expanding
1962 the function in-line. EXP is the expression that is a call to the builtin
1963 function; if convenient, the result should be placed in TARGET.
1964 SUBTARGET may be used as the target for computing one of EXP's operands. */
1966 static rtx
1967 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1969 optab builtin_optab;
1970 rtx op0, insns;
1971 tree fndecl = get_callee_fndecl (exp);
1972 enum machine_mode mode;
1973 bool errno_set = false;
1974 tree arg;
1976 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1977 return NULL_RTX;
1979 arg = CALL_EXPR_ARG (exp, 0);
1981 switch (DECL_FUNCTION_CODE (fndecl))
1983 CASE_FLT_FN (BUILT_IN_SQRT):
1984 errno_set = ! tree_expr_nonnegative_p (arg);
1985 builtin_optab = sqrt_optab;
1986 break;
1987 CASE_FLT_FN (BUILT_IN_EXP):
1988 errno_set = true; builtin_optab = exp_optab; break;
1989 CASE_FLT_FN (BUILT_IN_EXP10):
1990 CASE_FLT_FN (BUILT_IN_POW10):
1991 errno_set = true; builtin_optab = exp10_optab; break;
1992 CASE_FLT_FN (BUILT_IN_EXP2):
1993 errno_set = true; builtin_optab = exp2_optab; break;
1994 CASE_FLT_FN (BUILT_IN_EXPM1):
1995 errno_set = true; builtin_optab = expm1_optab; break;
1996 CASE_FLT_FN (BUILT_IN_LOGB):
1997 errno_set = true; builtin_optab = logb_optab; break;
1998 CASE_FLT_FN (BUILT_IN_LOG):
1999 errno_set = true; builtin_optab = log_optab; break;
2000 CASE_FLT_FN (BUILT_IN_LOG10):
2001 errno_set = true; builtin_optab = log10_optab; break;
2002 CASE_FLT_FN (BUILT_IN_LOG2):
2003 errno_set = true; builtin_optab = log2_optab; break;
2004 CASE_FLT_FN (BUILT_IN_LOG1P):
2005 errno_set = true; builtin_optab = log1p_optab; break;
2006 CASE_FLT_FN (BUILT_IN_ASIN):
2007 builtin_optab = asin_optab; break;
2008 CASE_FLT_FN (BUILT_IN_ACOS):
2009 builtin_optab = acos_optab; break;
2010 CASE_FLT_FN (BUILT_IN_TAN):
2011 builtin_optab = tan_optab; break;
2012 CASE_FLT_FN (BUILT_IN_ATAN):
2013 builtin_optab = atan_optab; break;
2014 CASE_FLT_FN (BUILT_IN_FLOOR):
2015 builtin_optab = floor_optab; break;
2016 CASE_FLT_FN (BUILT_IN_CEIL):
2017 builtin_optab = ceil_optab; break;
2018 CASE_FLT_FN (BUILT_IN_TRUNC):
2019 builtin_optab = btrunc_optab; break;
2020 CASE_FLT_FN (BUILT_IN_ROUND):
2021 builtin_optab = round_optab; break;
2022 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2023 builtin_optab = nearbyint_optab;
2024 if (flag_trapping_math)
2025 break;
2026 /* Else fallthrough and expand as rint. */
2027 CASE_FLT_FN (BUILT_IN_RINT):
2028 builtin_optab = rint_optab; break;
2029 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2030 builtin_optab = significand_optab; break;
2031 default:
2032 gcc_unreachable ();
2035 /* Make a suitable register to place result in. */
2036 mode = TYPE_MODE (TREE_TYPE (exp));
2038 if (! flag_errno_math || ! HONOR_NANS (mode))
2039 errno_set = false;
2041 /* Before working hard, check whether the instruction is available. */
2042 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2043 && (!errno_set || !optimize_insn_for_size_p ()))
2045 target = gen_reg_rtx (mode);
2047 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2048 need to expand the argument again. This way, we will not perform
2049 side-effects more the once. */
2050 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2052 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2054 start_sequence ();
2056 /* Compute into TARGET.
2057 Set TARGET to wherever the result comes back. */
2058 target = expand_unop (mode, builtin_optab, op0, target, 0);
2060 if (target != 0)
2062 if (errno_set)
2063 expand_errno_check (exp, target);
2065 /* Output the entire sequence. */
2066 insns = get_insns ();
2067 end_sequence ();
2068 emit_insn (insns);
2069 return target;
2072 /* If we were unable to expand via the builtin, stop the sequence
2073 (without outputting the insns) and call to the library function
2074 with the stabilized argument list. */
2075 end_sequence ();
2078 return expand_call (exp, target, target == const0_rtx);
2081 /* Expand a call to the builtin binary math functions (pow and atan2).
2082 Return NULL_RTX if a normal call should be emitted rather than expanding the
2083 function in-line. EXP is the expression that is a call to the builtin
2084 function; if convenient, the result should be placed in TARGET.
2085 SUBTARGET may be used as the target for computing one of EXP's
2086 operands. */
2088 static rtx
2089 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2091 optab builtin_optab;
2092 rtx op0, op1, insns;
2093 int op1_type = REAL_TYPE;
2094 tree fndecl = get_callee_fndecl (exp);
2095 tree arg0, arg1;
2096 enum machine_mode mode;
2097 bool errno_set = true;
2099 switch (DECL_FUNCTION_CODE (fndecl))
2101 CASE_FLT_FN (BUILT_IN_SCALBN):
2102 CASE_FLT_FN (BUILT_IN_SCALBLN):
2103 CASE_FLT_FN (BUILT_IN_LDEXP):
2104 op1_type = INTEGER_TYPE;
2105 default:
2106 break;
2109 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2110 return NULL_RTX;
2112 arg0 = CALL_EXPR_ARG (exp, 0);
2113 arg1 = CALL_EXPR_ARG (exp, 1);
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_POW):
2118 builtin_optab = pow_optab; break;
2119 CASE_FLT_FN (BUILT_IN_ATAN2):
2120 builtin_optab = atan2_optab; break;
2121 CASE_FLT_FN (BUILT_IN_SCALB):
2122 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2123 return 0;
2124 builtin_optab = scalb_optab; break;
2125 CASE_FLT_FN (BUILT_IN_SCALBN):
2126 CASE_FLT_FN (BUILT_IN_SCALBLN):
2127 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2128 return 0;
2129 /* Fall through... */
2130 CASE_FLT_FN (BUILT_IN_LDEXP):
2131 builtin_optab = ldexp_optab; break;
2132 CASE_FLT_FN (BUILT_IN_FMOD):
2133 builtin_optab = fmod_optab; break;
2134 CASE_FLT_FN (BUILT_IN_REMAINDER):
2135 CASE_FLT_FN (BUILT_IN_DREM):
2136 builtin_optab = remainder_optab; break;
2137 default:
2138 gcc_unreachable ();
2141 /* Make a suitable register to place result in. */
2142 mode = TYPE_MODE (TREE_TYPE (exp));
2144 /* Before working hard, check whether the instruction is available. */
2145 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2146 return NULL_RTX;
2148 target = gen_reg_rtx (mode);
2150 if (! flag_errno_math || ! HONOR_NANS (mode))
2151 errno_set = false;
2153 if (errno_set && optimize_insn_for_size_p ())
2154 return 0;
2156 /* Always stabilize the argument list. */
2157 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2158 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2160 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2161 op1 = expand_normal (arg1);
2163 start_sequence ();
2165 /* Compute into TARGET.
2166 Set TARGET to wherever the result comes back. */
2167 target = expand_binop (mode, builtin_optab, op0, op1,
2168 target, 0, OPTAB_DIRECT);
2170 /* If we were unable to expand via the builtin, stop the sequence
2171 (without outputting the insns) and call to the library function
2172 with the stabilized argument list. */
2173 if (target == 0)
2175 end_sequence ();
2176 return expand_call (exp, target, target == const0_rtx);
2179 if (errno_set)
2180 expand_errno_check (exp, target);
2182 /* Output the entire sequence. */
2183 insns = get_insns ();
2184 end_sequence ();
2185 emit_insn (insns);
2187 return target;
2190 /* Expand a call to the builtin trinary math functions (fma).
2191 Return NULL_RTX if a normal call should be emitted rather than expanding the
2192 function in-line. EXP is the expression that is a call to the builtin
2193 function; if convenient, the result should be placed in TARGET.
2194 SUBTARGET may be used as the target for computing one of EXP's
2195 operands. */
2197 static rtx
2198 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2200 optab builtin_optab;
2201 rtx op0, op1, op2, insns;
2202 tree fndecl = get_callee_fndecl (exp);
2203 tree arg0, arg1, arg2;
2204 enum machine_mode mode;
2206 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2207 return NULL_RTX;
2209 arg0 = CALL_EXPR_ARG (exp, 0);
2210 arg1 = CALL_EXPR_ARG (exp, 1);
2211 arg2 = CALL_EXPR_ARG (exp, 2);
2213 switch (DECL_FUNCTION_CODE (fndecl))
2215 CASE_FLT_FN (BUILT_IN_FMA):
2216 builtin_optab = fma_optab; break;
2217 default:
2218 gcc_unreachable ();
2221 /* Make a suitable register to place result in. */
2222 mode = TYPE_MODE (TREE_TYPE (exp));
2224 /* Before working hard, check whether the instruction is available. */
2225 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2226 return NULL_RTX;
2228 target = gen_reg_rtx (mode);
2230 /* Always stabilize the argument list. */
2231 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2232 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2233 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2235 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2236 op1 = expand_normal (arg1);
2237 op2 = expand_normal (arg2);
2239 start_sequence ();
2241 /* Compute into TARGET.
2242 Set TARGET to wherever the result comes back. */
2243 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2244 target, 0);
2246 /* If we were unable to expand via the builtin, stop the sequence
2247 (without outputting the insns) and call to the library function
2248 with the stabilized argument list. */
2249 if (target == 0)
2251 end_sequence ();
2252 return expand_call (exp, target, target == const0_rtx);
2255 /* Output the entire sequence. */
2256 insns = get_insns ();
2257 end_sequence ();
2258 emit_insn (insns);
2260 return target;
2263 /* Expand a call to the builtin sin and cos math functions.
2264 Return NULL_RTX if a normal call should be emitted rather than expanding the
2265 function in-line. EXP is the expression that is a call to the builtin
2266 function; if convenient, the result should be placed in TARGET.
2267 SUBTARGET may be used as the target for computing one of EXP's
2268 operands. */
2270 static rtx
2271 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2273 optab builtin_optab;
2274 rtx op0, insns;
2275 tree fndecl = get_callee_fndecl (exp);
2276 enum machine_mode mode;
2277 tree arg;
2279 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2280 return NULL_RTX;
2282 arg = CALL_EXPR_ARG (exp, 0);
2284 switch (DECL_FUNCTION_CODE (fndecl))
2286 CASE_FLT_FN (BUILT_IN_SIN):
2287 CASE_FLT_FN (BUILT_IN_COS):
2288 builtin_optab = sincos_optab; break;
2289 default:
2290 gcc_unreachable ();
2293 /* Make a suitable register to place result in. */
2294 mode = TYPE_MODE (TREE_TYPE (exp));
2296 /* Check if sincos insn is available, otherwise fallback
2297 to sin or cos insn. */
2298 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2299 switch (DECL_FUNCTION_CODE (fndecl))
2301 CASE_FLT_FN (BUILT_IN_SIN):
2302 builtin_optab = sin_optab; break;
2303 CASE_FLT_FN (BUILT_IN_COS):
2304 builtin_optab = cos_optab; break;
2305 default:
2306 gcc_unreachable ();
2309 /* Before working hard, check whether the instruction is available. */
2310 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2312 target = gen_reg_rtx (mode);
2314 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2315 need to expand the argument again. This way, we will not perform
2316 side-effects more the once. */
2317 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2319 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2321 start_sequence ();
2323 /* Compute into TARGET.
2324 Set TARGET to wherever the result comes back. */
2325 if (builtin_optab == sincos_optab)
2327 int result;
2329 switch (DECL_FUNCTION_CODE (fndecl))
2331 CASE_FLT_FN (BUILT_IN_SIN):
2332 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2333 break;
2334 CASE_FLT_FN (BUILT_IN_COS):
2335 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2336 break;
2337 default:
2338 gcc_unreachable ();
2340 gcc_assert (result);
2342 else
2344 target = expand_unop (mode, builtin_optab, op0, target, 0);
2347 if (target != 0)
2349 /* Output the entire sequence. */
2350 insns = get_insns ();
2351 end_sequence ();
2352 emit_insn (insns);
2353 return target;
2356 /* If we were unable to expand via the builtin, stop the sequence
2357 (without outputting the insns) and call to the library function
2358 with the stabilized argument list. */
2359 end_sequence ();
2362 target = expand_call (exp, target, target == const0_rtx);
2364 return target;
2367 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2368 return an RTL instruction code that implements the functionality.
2369 If that isn't possible or available return CODE_FOR_nothing. */
2371 static enum insn_code
2372 interclass_mathfn_icode (tree arg, tree fndecl)
2374 bool errno_set = false;
2375 optab builtin_optab = 0;
2376 enum machine_mode mode;
2378 switch (DECL_FUNCTION_CODE (fndecl))
2380 CASE_FLT_FN (BUILT_IN_ILOGB):
2381 errno_set = true; builtin_optab = ilogb_optab; break;
2382 CASE_FLT_FN (BUILT_IN_ISINF):
2383 builtin_optab = isinf_optab; break;
2384 case BUILT_IN_ISNORMAL:
2385 case BUILT_IN_ISFINITE:
2386 CASE_FLT_FN (BUILT_IN_FINITE):
2387 case BUILT_IN_FINITED32:
2388 case BUILT_IN_FINITED64:
2389 case BUILT_IN_FINITED128:
2390 case BUILT_IN_ISINFD32:
2391 case BUILT_IN_ISINFD64:
2392 case BUILT_IN_ISINFD128:
2393 /* These builtins have no optabs (yet). */
2394 break;
2395 default:
2396 gcc_unreachable ();
2399 /* There's no easy way to detect the case we need to set EDOM. */
2400 if (flag_errno_math && errno_set)
2401 return CODE_FOR_nothing;
2403 /* Optab mode depends on the mode of the input argument. */
2404 mode = TYPE_MODE (TREE_TYPE (arg));
2406 if (builtin_optab)
2407 return optab_handler (builtin_optab, mode);
2408 return CODE_FOR_nothing;
2411 /* Expand a call to one of the builtin math functions that operate on
2412 floating point argument and output an integer result (ilogb, isinf,
2413 isnan, etc).
2414 Return 0 if a normal call should be emitted rather than expanding the
2415 function in-line. EXP is the expression that is a call to the builtin
2416 function; if convenient, the result should be placed in TARGET. */
2418 static rtx
2419 expand_builtin_interclass_mathfn (tree exp, rtx target)
2421 enum insn_code icode = CODE_FOR_nothing;
2422 rtx op0;
2423 tree fndecl = get_callee_fndecl (exp);
2424 enum machine_mode mode;
2425 tree arg;
2427 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2428 return NULL_RTX;
2430 arg = CALL_EXPR_ARG (exp, 0);
2431 icode = interclass_mathfn_icode (arg, fndecl);
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2434 if (icode != CODE_FOR_nothing)
2436 struct expand_operand ops[1];
2437 rtx last = get_last_insn ();
2438 tree orig_arg = arg;
2440 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2441 need to expand the argument again. This way, we will not perform
2442 side-effects more the once. */
2443 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2445 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2447 if (mode != GET_MODE (op0))
2448 op0 = convert_to_mode (mode, op0, 0);
2450 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2451 if (maybe_legitimize_operands (icode, 0, 1, ops)
2452 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2453 return ops[0].value;
2455 delete_insns_since (last);
2456 CALL_EXPR_ARG (exp, 0) = orig_arg;
2459 return NULL_RTX;
2462 /* Expand a call to the builtin sincos math function.
2463 Return NULL_RTX if a normal call should be emitted rather than expanding the
2464 function in-line. EXP is the expression that is a call to the builtin
2465 function. */
2467 static rtx
2468 expand_builtin_sincos (tree exp)
2470 rtx op0, op1, op2, target1, target2;
2471 enum machine_mode mode;
2472 tree arg, sinp, cosp;
2473 int result;
2474 location_t loc = EXPR_LOCATION (exp);
2475 tree alias_type, alias_off;
2477 if (!validate_arglist (exp, REAL_TYPE,
2478 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2479 return NULL_RTX;
2481 arg = CALL_EXPR_ARG (exp, 0);
2482 sinp = CALL_EXPR_ARG (exp, 1);
2483 cosp = CALL_EXPR_ARG (exp, 2);
2485 /* Make a suitable register to place result in. */
2486 mode = TYPE_MODE (TREE_TYPE (arg));
2488 /* Check if sincos insn is available, otherwise emit the call. */
2489 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2490 return NULL_RTX;
2492 target1 = gen_reg_rtx (mode);
2493 target2 = gen_reg_rtx (mode);
2495 op0 = expand_normal (arg);
2496 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2497 alias_off = build_int_cst (alias_type, 0);
2498 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2499 sinp, alias_off));
2500 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2501 cosp, alias_off));
2503 /* Compute into target1 and target2.
2504 Set TARGET to wherever the result comes back. */
2505 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2506 gcc_assert (result);
2508 /* Move target1 and target2 to the memory locations indicated
2509 by op1 and op2. */
2510 emit_move_insn (op1, target1);
2511 emit_move_insn (op2, target2);
2513 return const0_rtx;
2516 /* Expand a call to the internal cexpi builtin to the sincos math function.
2517 EXP is the expression that is a call to the builtin function; if convenient,
2518 the result should be placed in TARGET. */
2520 static rtx
2521 expand_builtin_cexpi (tree exp, rtx target)
2523 tree fndecl = get_callee_fndecl (exp);
2524 tree arg, type;
2525 enum machine_mode mode;
2526 rtx op0, op1, op2;
2527 location_t loc = EXPR_LOCATION (exp);
2529 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2530 return NULL_RTX;
2532 arg = CALL_EXPR_ARG (exp, 0);
2533 type = TREE_TYPE (arg);
2534 mode = TYPE_MODE (TREE_TYPE (arg));
2536 /* Try expanding via a sincos optab, fall back to emitting a libcall
2537 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2538 is only generated from sincos, cexp or if we have either of them. */
2539 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2541 op1 = gen_reg_rtx (mode);
2542 op2 = gen_reg_rtx (mode);
2544 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2546 /* Compute into op1 and op2. */
2547 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2549 else if (TARGET_HAS_SINCOS)
2551 tree call, fn = NULL_TREE;
2552 tree top1, top2;
2553 rtx op1a, op2a;
2555 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2556 fn = built_in_decls[BUILT_IN_SINCOSF];
2557 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2558 fn = built_in_decls[BUILT_IN_SINCOS];
2559 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2560 fn = built_in_decls[BUILT_IN_SINCOSL];
2561 else
2562 gcc_unreachable ();
2564 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2565 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2566 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2567 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2568 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2569 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2571 /* Make sure not to fold the sincos call again. */
2572 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2573 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2574 call, 3, arg, top1, top2));
2576 else
2578 tree call, fn = NULL_TREE, narg;
2579 tree ctype = build_complex_type (type);
2581 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2582 fn = built_in_decls[BUILT_IN_CEXPF];
2583 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2584 fn = built_in_decls[BUILT_IN_CEXP];
2585 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2586 fn = built_in_decls[BUILT_IN_CEXPL];
2587 else
2588 gcc_unreachable ();
2590 /* If we don't have a decl for cexp create one. This is the
2591 friendliest fallback if the user calls __builtin_cexpi
2592 without full target C99 function support. */
2593 if (fn == NULL_TREE)
2595 tree fntype;
2596 const char *name = NULL;
2598 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2599 name = "cexpf";
2600 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2601 name = "cexp";
2602 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2603 name = "cexpl";
2605 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2606 fn = build_fn_decl (name, fntype);
2609 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2610 build_real (type, dconst0), arg);
2612 /* Make sure not to fold the cexp call again. */
2613 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2614 return expand_expr (build_call_nary (ctype, call, 1, narg),
2615 target, VOIDmode, EXPAND_NORMAL);
2618 /* Now build the proper return type. */
2619 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2620 make_tree (TREE_TYPE (arg), op2),
2621 make_tree (TREE_TYPE (arg), op1)),
2622 target, VOIDmode, EXPAND_NORMAL);
2625 /* Conveniently construct a function call expression. FNDECL names the
2626 function to be called, N is the number of arguments, and the "..."
2627 parameters are the argument expressions. Unlike build_call_exr
2628 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2630 static tree
2631 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2633 va_list ap;
2634 tree fntype = TREE_TYPE (fndecl);
2635 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2637 va_start (ap, n);
2638 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2639 va_end (ap);
2640 SET_EXPR_LOCATION (fn, loc);
2641 return fn;
2644 /* Expand a call to one of the builtin rounding functions gcc defines
2645 as an extension (lfloor and lceil). As these are gcc extensions we
2646 do not need to worry about setting errno to EDOM.
2647 If expanding via optab fails, lower expression to (int)(floor(x)).
2648 EXP is the expression that is a call to the builtin function;
2649 if convenient, the result should be placed in TARGET. */
2651 static rtx
2652 expand_builtin_int_roundingfn (tree exp, rtx target)
2654 convert_optab builtin_optab;
2655 rtx op0, insns, tmp;
2656 tree fndecl = get_callee_fndecl (exp);
2657 enum built_in_function fallback_fn;
2658 tree fallback_fndecl;
2659 enum machine_mode mode;
2660 tree arg;
2662 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2663 gcc_unreachable ();
2665 arg = CALL_EXPR_ARG (exp, 0);
2667 switch (DECL_FUNCTION_CODE (fndecl))
2669 CASE_FLT_FN (BUILT_IN_ICEIL):
2670 CASE_FLT_FN (BUILT_IN_LCEIL):
2671 CASE_FLT_FN (BUILT_IN_LLCEIL):
2672 builtin_optab = lceil_optab;
2673 fallback_fn = BUILT_IN_CEIL;
2674 break;
2676 CASE_FLT_FN (BUILT_IN_IFLOOR):
2677 CASE_FLT_FN (BUILT_IN_LFLOOR):
2678 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2679 builtin_optab = lfloor_optab;
2680 fallback_fn = BUILT_IN_FLOOR;
2681 break;
2683 default:
2684 gcc_unreachable ();
2687 /* Make a suitable register to place result in. */
2688 mode = TYPE_MODE (TREE_TYPE (exp));
2690 target = gen_reg_rtx (mode);
2692 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2693 need to expand the argument again. This way, we will not perform
2694 side-effects more the once. */
2695 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2697 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2699 start_sequence ();
2701 /* Compute into TARGET. */
2702 if (expand_sfix_optab (target, op0, builtin_optab))
2704 /* Output the entire sequence. */
2705 insns = get_insns ();
2706 end_sequence ();
2707 emit_insn (insns);
2708 return target;
2711 /* If we were unable to expand via the builtin, stop the sequence
2712 (without outputting the insns). */
2713 end_sequence ();
2715 /* Fall back to floating point rounding optab. */
2716 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2718 /* For non-C99 targets we may end up without a fallback fndecl here
2719 if the user called __builtin_lfloor directly. In this case emit
2720 a call to the floor/ceil variants nevertheless. This should result
2721 in the best user experience for not full C99 targets. */
2722 if (fallback_fndecl == NULL_TREE)
2724 tree fntype;
2725 const char *name = NULL;
2727 switch (DECL_FUNCTION_CODE (fndecl))
2729 case BUILT_IN_ICEIL:
2730 case BUILT_IN_LCEIL:
2731 case BUILT_IN_LLCEIL:
2732 name = "ceil";
2733 break;
2734 case BUILT_IN_ICEILF:
2735 case BUILT_IN_LCEILF:
2736 case BUILT_IN_LLCEILF:
2737 name = "ceilf";
2738 break;
2739 case BUILT_IN_ICEILL:
2740 case BUILT_IN_LCEILL:
2741 case BUILT_IN_LLCEILL:
2742 name = "ceill";
2743 break;
2744 case BUILT_IN_IFLOOR:
2745 case BUILT_IN_LFLOOR:
2746 case BUILT_IN_LLFLOOR:
2747 name = "floor";
2748 break;
2749 case BUILT_IN_IFLOORF:
2750 case BUILT_IN_LFLOORF:
2751 case BUILT_IN_LLFLOORF:
2752 name = "floorf";
2753 break;
2754 case BUILT_IN_IFLOORL:
2755 case BUILT_IN_LFLOORL:
2756 case BUILT_IN_LLFLOORL:
2757 name = "floorl";
2758 break;
2759 default:
2760 gcc_unreachable ();
2763 fntype = build_function_type_list (TREE_TYPE (arg),
2764 TREE_TYPE (arg), NULL_TREE);
2765 fallback_fndecl = build_fn_decl (name, fntype);
2768 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2770 tmp = expand_normal (exp);
2772 /* Truncate the result of floating point optab to integer
2773 via expand_fix (). */
2774 target = gen_reg_rtx (mode);
2775 expand_fix (target, tmp, 0);
2777 return target;
2780 /* Expand a call to one of the builtin math functions doing integer
2781 conversion (lrint).
2782 Return 0 if a normal call should be emitted rather than expanding the
2783 function in-line. EXP is the expression that is a call to the builtin
2784 function; if convenient, the result should be placed in TARGET. */
2786 static rtx
2787 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2789 convert_optab builtin_optab;
2790 rtx op0, insns;
2791 tree fndecl = get_callee_fndecl (exp);
2792 tree arg;
2793 enum machine_mode mode;
2795 /* There's no easy way to detect the case we need to set EDOM. */
2796 if (flag_errno_math)
2797 return NULL_RTX;
2799 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2800 gcc_unreachable ();
2802 arg = CALL_EXPR_ARG (exp, 0);
2804 switch (DECL_FUNCTION_CODE (fndecl))
2806 CASE_FLT_FN (BUILT_IN_IRINT):
2807 CASE_FLT_FN (BUILT_IN_LRINT):
2808 CASE_FLT_FN (BUILT_IN_LLRINT):
2809 builtin_optab = lrint_optab; break;
2811 CASE_FLT_FN (BUILT_IN_IROUND):
2812 CASE_FLT_FN (BUILT_IN_LROUND):
2813 CASE_FLT_FN (BUILT_IN_LLROUND):
2814 builtin_optab = lround_optab; break;
2816 default:
2817 gcc_unreachable ();
2820 /* Make a suitable register to place result in. */
2821 mode = TYPE_MODE (TREE_TYPE (exp));
2823 target = gen_reg_rtx (mode);
2825 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2826 need to expand the argument again. This way, we will not perform
2827 side-effects more the once. */
2828 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2830 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2832 start_sequence ();
2834 if (expand_sfix_optab (target, op0, builtin_optab))
2836 /* Output the entire sequence. */
2837 insns = get_insns ();
2838 end_sequence ();
2839 emit_insn (insns);
2840 return target;
2843 /* If we were unable to expand via the builtin, stop the sequence
2844 (without outputting the insns) and call to the library function
2845 with the stabilized argument list. */
2846 end_sequence ();
2848 target = expand_call (exp, target, target == const0_rtx);
2850 return target;
2853 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2854 a normal call should be emitted rather than expanding the function
2855 in-line. EXP is the expression that is a call to the builtin
2856 function; if convenient, the result should be placed in TARGET. */
2858 static rtx
2859 expand_builtin_powi (tree exp, rtx target)
2861 tree arg0, arg1;
2862 rtx op0, op1;
2863 enum machine_mode mode;
2864 enum machine_mode mode2;
2866 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2867 return NULL_RTX;
2869 arg0 = CALL_EXPR_ARG (exp, 0);
2870 arg1 = CALL_EXPR_ARG (exp, 1);
2871 mode = TYPE_MODE (TREE_TYPE (exp));
2873 /* Emit a libcall to libgcc. */
2875 /* Mode of the 2nd argument must match that of an int. */
2876 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2878 if (target == NULL_RTX)
2879 target = gen_reg_rtx (mode);
2881 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2882 if (GET_MODE (op0) != mode)
2883 op0 = convert_to_mode (mode, op0, 0);
2884 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2885 if (GET_MODE (op1) != mode2)
2886 op1 = convert_to_mode (mode2, op1, 0);
2888 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2889 target, LCT_CONST, mode, 2,
2890 op0, mode, op1, mode2);
2892 return target;
2895 /* Expand expression EXP which is a call to the strlen builtin. Return
2896 NULL_RTX if we failed the caller should emit a normal call, otherwise
2897 try to get the result in TARGET, if convenient. */
2899 static rtx
2900 expand_builtin_strlen (tree exp, rtx target,
2901 enum machine_mode target_mode)
2903 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2904 return NULL_RTX;
2905 else
2907 struct expand_operand ops[4];
2908 rtx pat;
2909 tree len;
2910 tree src = CALL_EXPR_ARG (exp, 0);
2911 rtx src_reg, before_strlen;
2912 enum machine_mode insn_mode = target_mode;
2913 enum insn_code icode = CODE_FOR_nothing;
2914 unsigned int align;
2916 /* If the length can be computed at compile-time, return it. */
2917 len = c_strlen (src, 0);
2918 if (len)
2919 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2921 /* If the length can be computed at compile-time and is constant
2922 integer, but there are side-effects in src, evaluate
2923 src for side-effects, then return len.
2924 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2925 can be optimized into: i++; x = 3; */
2926 len = c_strlen (src, 1);
2927 if (len && TREE_CODE (len) == INTEGER_CST)
2929 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2930 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2933 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2935 /* If SRC is not a pointer type, don't do this operation inline. */
2936 if (align == 0)
2937 return NULL_RTX;
2939 /* Bail out if we can't compute strlen in the right mode. */
2940 while (insn_mode != VOIDmode)
2942 icode = optab_handler (strlen_optab, insn_mode);
2943 if (icode != CODE_FOR_nothing)
2944 break;
2946 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2948 if (insn_mode == VOIDmode)
2949 return NULL_RTX;
2951 /* Make a place to hold the source address. We will not expand
2952 the actual source until we are sure that the expansion will
2953 not fail -- there are trees that cannot be expanded twice. */
2954 src_reg = gen_reg_rtx (Pmode);
2956 /* Mark the beginning of the strlen sequence so we can emit the
2957 source operand later. */
2958 before_strlen = get_last_insn ();
2960 create_output_operand (&ops[0], target, insn_mode);
2961 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2962 create_integer_operand (&ops[2], 0);
2963 create_integer_operand (&ops[3], align);
2964 if (!maybe_expand_insn (icode, 4, ops))
2965 return NULL_RTX;
2967 /* Now that we are assured of success, expand the source. */
2968 start_sequence ();
2969 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2970 if (pat != src_reg)
2972 #ifdef POINTERS_EXTEND_UNSIGNED
2973 if (GET_MODE (pat) != Pmode)
2974 pat = convert_to_mode (Pmode, pat,
2975 POINTERS_EXTEND_UNSIGNED);
2976 #endif
2977 emit_move_insn (src_reg, pat);
2979 pat = get_insns ();
2980 end_sequence ();
2982 if (before_strlen)
2983 emit_insn_after (pat, before_strlen);
2984 else
2985 emit_insn_before (pat, get_insns ());
2987 /* Return the value in the proper mode for this function. */
2988 if (GET_MODE (ops[0].value) == target_mode)
2989 target = ops[0].value;
2990 else if (target != 0)
2991 convert_move (target, ops[0].value, 0);
2992 else
2993 target = convert_to_mode (target_mode, ops[0].value, 0);
2995 return target;
2999 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3000 bytes from constant string DATA + OFFSET and return it as target
3001 constant. */
3003 static rtx
3004 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3005 enum machine_mode mode)
3007 const char *str = (const char *) data;
3009 gcc_assert (offset >= 0
3010 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3011 <= strlen (str) + 1));
3013 return c_readstr (str + offset, mode);
3016 /* Expand a call EXP to the memcpy builtin.
3017 Return NULL_RTX if we failed, the caller should emit a normal call,
3018 otherwise try to get the result in TARGET, if convenient (and in
3019 mode MODE if that's convenient). */
3021 static rtx
3022 expand_builtin_memcpy (tree exp, rtx target)
3024 if (!validate_arglist (exp,
3025 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3026 return NULL_RTX;
3027 else
3029 tree dest = CALL_EXPR_ARG (exp, 0);
3030 tree src = CALL_EXPR_ARG (exp, 1);
3031 tree len = CALL_EXPR_ARG (exp, 2);
3032 const char *src_str;
3033 unsigned int src_align = get_pointer_alignment (src);
3034 unsigned int dest_align = get_pointer_alignment (dest);
3035 rtx dest_mem, src_mem, dest_addr, len_rtx;
3036 HOST_WIDE_INT expected_size = -1;
3037 unsigned int expected_align = 0;
3039 /* If DEST is not a pointer type, call the normal function. */
3040 if (dest_align == 0)
3041 return NULL_RTX;
3043 /* If either SRC is not a pointer type, don't do this
3044 operation in-line. */
3045 if (src_align == 0)
3046 return NULL_RTX;
3048 if (currently_expanding_gimple_stmt)
3049 stringop_block_profile (currently_expanding_gimple_stmt,
3050 &expected_align, &expected_size);
3052 if (expected_align < dest_align)
3053 expected_align = dest_align;
3054 dest_mem = get_memory_rtx (dest, len);
3055 set_mem_align (dest_mem, dest_align);
3056 len_rtx = expand_normal (len);
3057 src_str = c_getstr (src);
3059 /* If SRC is a string constant and block move would be done
3060 by pieces, we can avoid loading the string from memory
3061 and only stored the computed constants. */
3062 if (src_str
3063 && CONST_INT_P (len_rtx)
3064 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3065 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3066 CONST_CAST (char *, src_str),
3067 dest_align, false))
3069 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3070 builtin_memcpy_read_str,
3071 CONST_CAST (char *, src_str),
3072 dest_align, false, 0);
3073 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3074 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3075 return dest_mem;
3078 src_mem = get_memory_rtx (src, len);
3079 set_mem_align (src_mem, src_align);
3081 /* Copy word part most expediently. */
3082 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3083 CALL_EXPR_TAILCALL (exp)
3084 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3085 expected_align, expected_size);
3087 if (dest_addr == 0)
3089 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3090 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3092 return dest_addr;
3096 /* Expand a call EXP to the mempcpy builtin.
3097 Return NULL_RTX if we failed; the caller should emit a normal call,
3098 otherwise try to get the result in TARGET, if convenient (and in
3099 mode MODE if that's convenient). If ENDP is 0 return the
3100 destination pointer, if ENDP is 1 return the end pointer ala
3101 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3102 stpcpy. */
3104 static rtx
3105 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3107 if (!validate_arglist (exp,
3108 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3109 return NULL_RTX;
3110 else
3112 tree dest = CALL_EXPR_ARG (exp, 0);
3113 tree src = CALL_EXPR_ARG (exp, 1);
3114 tree len = CALL_EXPR_ARG (exp, 2);
3115 return expand_builtin_mempcpy_args (dest, src, len,
3116 target, mode, /*endp=*/ 1);
3120 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3121 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3122 so that this can also be called without constructing an actual CALL_EXPR.
3123 The other arguments and return value are the same as for
3124 expand_builtin_mempcpy. */
3126 static rtx
3127 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3128 rtx target, enum machine_mode mode, int endp)
3130 /* If return value is ignored, transform mempcpy into memcpy. */
3131 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3133 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3134 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3135 dest, src, len);
3136 return expand_expr (result, target, mode, EXPAND_NORMAL);
3138 else
3140 const char *src_str;
3141 unsigned int src_align = get_pointer_alignment (src);
3142 unsigned int dest_align = get_pointer_alignment (dest);
3143 rtx dest_mem, src_mem, len_rtx;
3145 /* If either SRC or DEST is not a pointer type, don't do this
3146 operation in-line. */
3147 if (dest_align == 0 || src_align == 0)
3148 return NULL_RTX;
3150 /* If LEN is not constant, call the normal function. */
3151 if (! host_integerp (len, 1))
3152 return NULL_RTX;
3154 len_rtx = expand_normal (len);
3155 src_str = c_getstr (src);
3157 /* If SRC is a string constant and block move would be done
3158 by pieces, we can avoid loading the string from memory
3159 and only stored the computed constants. */
3160 if (src_str
3161 && CONST_INT_P (len_rtx)
3162 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3163 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3164 CONST_CAST (char *, src_str),
3165 dest_align, false))
3167 dest_mem = get_memory_rtx (dest, len);
3168 set_mem_align (dest_mem, dest_align);
3169 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3170 builtin_memcpy_read_str,
3171 CONST_CAST (char *, src_str),
3172 dest_align, false, endp);
3173 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3174 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3175 return dest_mem;
3178 if (CONST_INT_P (len_rtx)
3179 && can_move_by_pieces (INTVAL (len_rtx),
3180 MIN (dest_align, src_align)))
3182 dest_mem = get_memory_rtx (dest, len);
3183 set_mem_align (dest_mem, dest_align);
3184 src_mem = get_memory_rtx (src, len);
3185 set_mem_align (src_mem, src_align);
3186 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3187 MIN (dest_align, src_align), endp);
3188 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3189 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3190 return dest_mem;
3193 return NULL_RTX;
3197 #ifndef HAVE_movstr
3198 # define HAVE_movstr 0
3199 # define CODE_FOR_movstr CODE_FOR_nothing
3200 #endif
3202 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3203 we failed, the caller should emit a normal call, otherwise try to
3204 get the result in TARGET, if convenient. If ENDP is 0 return the
3205 destination pointer, if ENDP is 1 return the end pointer ala
3206 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3207 stpcpy. */
3209 static rtx
3210 expand_movstr (tree dest, tree src, rtx target, int endp)
3212 struct expand_operand ops[3];
3213 rtx dest_mem;
3214 rtx src_mem;
3216 if (!HAVE_movstr)
3217 return NULL_RTX;
3219 dest_mem = get_memory_rtx (dest, NULL);
3220 src_mem = get_memory_rtx (src, NULL);
3221 if (!endp)
3223 target = force_reg (Pmode, XEXP (dest_mem, 0));
3224 dest_mem = replace_equiv_address (dest_mem, target);
3227 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3228 create_fixed_operand (&ops[1], dest_mem);
3229 create_fixed_operand (&ops[2], src_mem);
3230 expand_insn (CODE_FOR_movstr, 3, ops);
3232 if (endp && target != const0_rtx)
3234 target = ops[0].value;
3235 /* movstr is supposed to set end to the address of the NUL
3236 terminator. If the caller requested a mempcpy-like return value,
3237 adjust it. */
3238 if (endp == 1)
3240 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3241 emit_move_insn (target, force_operand (tem, NULL_RTX));
3244 return target;
3247 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3248 NULL_RTX if we failed the caller should emit a normal call, otherwise
3249 try to get the result in TARGET, if convenient (and in mode MODE if that's
3250 convenient). */
3252 static rtx
3253 expand_builtin_strcpy (tree exp, rtx target)
3255 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 tree dest = CALL_EXPR_ARG (exp, 0);
3258 tree src = CALL_EXPR_ARG (exp, 1);
3259 return expand_builtin_strcpy_args (dest, src, target);
3261 return NULL_RTX;
3264 /* Helper function to do the actual work for expand_builtin_strcpy. The
3265 arguments to the builtin_strcpy call DEST and SRC are broken out
3266 so that this can also be called without constructing an actual CALL_EXPR.
3267 The other arguments and return value are the same as for
3268 expand_builtin_strcpy. */
3270 static rtx
3271 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3273 return expand_movstr (dest, src, target, /*endp=*/0);
3276 /* Expand a call EXP to the stpcpy builtin.
3277 Return NULL_RTX if we failed the caller should emit a normal call,
3278 otherwise try to get the result in TARGET, if convenient (and in
3279 mode MODE if that's convenient). */
3281 static rtx
3282 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3284 tree dst, src;
3285 location_t loc = EXPR_LOCATION (exp);
3287 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3288 return NULL_RTX;
3290 dst = CALL_EXPR_ARG (exp, 0);
3291 src = CALL_EXPR_ARG (exp, 1);
3293 /* If return value is ignored, transform stpcpy into strcpy. */
3294 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3296 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3297 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3298 return expand_expr (result, target, mode, EXPAND_NORMAL);
3300 else
3302 tree len, lenp1;
3303 rtx ret;
3305 /* Ensure we get an actual string whose length can be evaluated at
3306 compile-time, not an expression containing a string. This is
3307 because the latter will potentially produce pessimized code
3308 when used to produce the return value. */
3309 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3310 return expand_movstr (dst, src, target, /*endp=*/2);
3312 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3313 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3314 target, mode, /*endp=*/2);
3316 if (ret)
3317 return ret;
3319 if (TREE_CODE (len) == INTEGER_CST)
3321 rtx len_rtx = expand_normal (len);
3323 if (CONST_INT_P (len_rtx))
3325 ret = expand_builtin_strcpy_args (dst, src, target);
3327 if (ret)
3329 if (! target)
3331 if (mode != VOIDmode)
3332 target = gen_reg_rtx (mode);
3333 else
3334 target = gen_reg_rtx (GET_MODE (ret));
3336 if (GET_MODE (target) != GET_MODE (ret))
3337 ret = gen_lowpart (GET_MODE (target), ret);
3339 ret = plus_constant (ret, INTVAL (len_rtx));
3340 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3341 gcc_assert (ret);
3343 return target;
3348 return expand_movstr (dst, src, target, /*endp=*/2);
3352 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3353 bytes from constant string DATA + OFFSET and return it as target
3354 constant. */
3357 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3358 enum machine_mode mode)
3360 const char *str = (const char *) data;
3362 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3363 return const0_rtx;
3365 return c_readstr (str + offset, mode);
3368 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3369 NULL_RTX if we failed the caller should emit a normal call. */
3371 static rtx
3372 expand_builtin_strncpy (tree exp, rtx target)
3374 location_t loc = EXPR_LOCATION (exp);
3376 if (validate_arglist (exp,
3377 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3379 tree dest = CALL_EXPR_ARG (exp, 0);
3380 tree src = CALL_EXPR_ARG (exp, 1);
3381 tree len = CALL_EXPR_ARG (exp, 2);
3382 tree slen = c_strlen (src, 1);
3384 /* We must be passed a constant len and src parameter. */
3385 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3386 return NULL_RTX;
3388 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3390 /* We're required to pad with trailing zeros if the requested
3391 len is greater than strlen(s2)+1. In that case try to
3392 use store_by_pieces, if it fails, punt. */
3393 if (tree_int_cst_lt (slen, len))
3395 unsigned int dest_align = get_pointer_alignment (dest);
3396 const char *p = c_getstr (src);
3397 rtx dest_mem;
3399 if (!p || dest_align == 0 || !host_integerp (len, 1)
3400 || !can_store_by_pieces (tree_low_cst (len, 1),
3401 builtin_strncpy_read_str,
3402 CONST_CAST (char *, p),
3403 dest_align, false))
3404 return NULL_RTX;
3406 dest_mem = get_memory_rtx (dest, len);
3407 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3408 builtin_strncpy_read_str,
3409 CONST_CAST (char *, p), dest_align, false, 0);
3410 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3411 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3412 return dest_mem;
3415 return NULL_RTX;
3418 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3419 bytes from constant string DATA + OFFSET and return it as target
3420 constant. */
3423 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3424 enum machine_mode mode)
3426 const char *c = (const char *) data;
3427 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3429 memset (p, *c, GET_MODE_SIZE (mode));
3431 return c_readstr (p, mode);
3434 /* Callback routine for store_by_pieces. Return the RTL of a register
3435 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3436 char value given in the RTL register data. For example, if mode is
3437 4 bytes wide, return the RTL for 0x01010101*data. */
3439 static rtx
3440 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3441 enum machine_mode mode)
3443 rtx target, coeff;
3444 size_t size;
3445 char *p;
3447 size = GET_MODE_SIZE (mode);
3448 if (size == 1)
3449 return (rtx) data;
3451 p = XALLOCAVEC (char, size);
3452 memset (p, 1, size);
3453 coeff = c_readstr (p, mode);
3455 target = convert_to_mode (mode, (rtx) data, 1);
3456 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3457 return force_reg (mode, target);
3460 /* Expand expression EXP, which is a call to the memset builtin. Return
3461 NULL_RTX if we failed the caller should emit a normal call, otherwise
3462 try to get the result in TARGET, if convenient (and in mode MODE if that's
3463 convenient). */
3465 static rtx
3466 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3468 if (!validate_arglist (exp,
3469 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3470 return NULL_RTX;
3471 else
3473 tree dest = CALL_EXPR_ARG (exp, 0);
3474 tree val = CALL_EXPR_ARG (exp, 1);
3475 tree len = CALL_EXPR_ARG (exp, 2);
3476 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3480 /* Helper function to do the actual work for expand_builtin_memset. The
3481 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3482 so that this can also be called without constructing an actual CALL_EXPR.
3483 The other arguments and return value are the same as for
3484 expand_builtin_memset. */
3486 static rtx
3487 expand_builtin_memset_args (tree dest, tree val, tree len,
3488 rtx target, enum machine_mode mode, tree orig_exp)
3490 tree fndecl, fn;
3491 enum built_in_function fcode;
3492 enum machine_mode val_mode;
3493 char c;
3494 unsigned int dest_align;
3495 rtx dest_mem, dest_addr, len_rtx;
3496 HOST_WIDE_INT expected_size = -1;
3497 unsigned int expected_align = 0;
3499 dest_align = get_pointer_alignment (dest);
3501 /* If DEST is not a pointer type, don't do this operation in-line. */
3502 if (dest_align == 0)
3503 return NULL_RTX;
3505 if (currently_expanding_gimple_stmt)
3506 stringop_block_profile (currently_expanding_gimple_stmt,
3507 &expected_align, &expected_size);
3509 if (expected_align < dest_align)
3510 expected_align = dest_align;
3512 /* If the LEN parameter is zero, return DEST. */
3513 if (integer_zerop (len))
3515 /* Evaluate and ignore VAL in case it has side-effects. */
3516 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3517 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3520 /* Stabilize the arguments in case we fail. */
3521 dest = builtin_save_expr (dest);
3522 val = builtin_save_expr (val);
3523 len = builtin_save_expr (len);
3525 len_rtx = expand_normal (len);
3526 dest_mem = get_memory_rtx (dest, len);
3527 val_mode = TYPE_MODE (unsigned_char_type_node);
3529 if (TREE_CODE (val) != INTEGER_CST)
3531 rtx val_rtx;
3533 val_rtx = expand_normal (val);
3534 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3536 /* Assume that we can memset by pieces if we can store
3537 * the coefficients by pieces (in the required modes).
3538 * We can't pass builtin_memset_gen_str as that emits RTL. */
3539 c = 1;
3540 if (host_integerp (len, 1)
3541 && can_store_by_pieces (tree_low_cst (len, 1),
3542 builtin_memset_read_str, &c, dest_align,
3543 true))
3545 val_rtx = force_reg (val_mode, val_rtx);
3546 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3547 builtin_memset_gen_str, val_rtx, dest_align,
3548 true, 0);
3550 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3551 dest_align, expected_align,
3552 expected_size))
3553 goto do_libcall;
3555 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3556 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3557 return dest_mem;
3560 if (target_char_cast (val, &c))
3561 goto do_libcall;
3563 if (c)
3565 if (host_integerp (len, 1)
3566 && can_store_by_pieces (tree_low_cst (len, 1),
3567 builtin_memset_read_str, &c, dest_align,
3568 true))
3569 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3570 builtin_memset_read_str, &c, dest_align, true, 0);
3571 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3572 gen_int_mode (c, val_mode),
3573 dest_align, expected_align,
3574 expected_size))
3575 goto do_libcall;
3577 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3579 return dest_mem;
3582 set_mem_align (dest_mem, dest_align);
3583 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3584 CALL_EXPR_TAILCALL (orig_exp)
3585 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3586 expected_align, expected_size);
3588 if (dest_addr == 0)
3590 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3591 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3594 return dest_addr;
3596 do_libcall:
3597 fndecl = get_callee_fndecl (orig_exp);
3598 fcode = DECL_FUNCTION_CODE (fndecl);
3599 if (fcode == BUILT_IN_MEMSET)
3600 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3601 dest, val, len);
3602 else if (fcode == BUILT_IN_BZERO)
3603 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3604 dest, len);
3605 else
3606 gcc_unreachable ();
3607 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3608 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3609 return expand_call (fn, target, target == const0_rtx);
3612 /* Expand expression EXP, which is a call to the bzero builtin. Return
3613 NULL_RTX if we failed the caller should emit a normal call. */
3615 static rtx
3616 expand_builtin_bzero (tree exp)
3618 tree dest, size;
3619 location_t loc = EXPR_LOCATION (exp);
3621 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3622 return NULL_RTX;
3624 dest = CALL_EXPR_ARG (exp, 0);
3625 size = CALL_EXPR_ARG (exp, 1);
3627 /* New argument list transforming bzero(ptr x, int y) to
3628 memset(ptr x, int 0, size_t y). This is done this way
3629 so that if it isn't expanded inline, we fallback to
3630 calling bzero instead of memset. */
3632 return expand_builtin_memset_args (dest, integer_zero_node,
3633 fold_convert_loc (loc, sizetype, size),
3634 const0_rtx, VOIDmode, exp);
3637 /* Expand expression EXP, which is a call to the memcmp built-in function.
3638 Return NULL_RTX if we failed and the caller should emit a normal call,
3639 otherwise try to get the result in TARGET, if convenient (and in mode
3640 MODE, if that's convenient). */
3642 static rtx
3643 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3644 ATTRIBUTE_UNUSED enum machine_mode mode)
3646 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3648 if (!validate_arglist (exp,
3649 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3650 return NULL_RTX;
3652 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3653 implementing memcmp because it will stop if it encounters two
3654 zero bytes. */
3655 #if defined HAVE_cmpmemsi
3657 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3658 rtx result;
3659 rtx insn;
3660 tree arg1 = CALL_EXPR_ARG (exp, 0);
3661 tree arg2 = CALL_EXPR_ARG (exp, 1);
3662 tree len = CALL_EXPR_ARG (exp, 2);
3664 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3665 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3666 enum machine_mode insn_mode;
3668 if (HAVE_cmpmemsi)
3669 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3670 else
3671 return NULL_RTX;
3673 /* If we don't have POINTER_TYPE, call the function. */
3674 if (arg1_align == 0 || arg2_align == 0)
3675 return NULL_RTX;
3677 /* Make a place to write the result of the instruction. */
3678 result = target;
3679 if (! (result != 0
3680 && REG_P (result) && GET_MODE (result) == insn_mode
3681 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3682 result = gen_reg_rtx (insn_mode);
3684 arg1_rtx = get_memory_rtx (arg1, len);
3685 arg2_rtx = get_memory_rtx (arg2, len);
3686 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3688 /* Set MEM_SIZE as appropriate. */
3689 if (CONST_INT_P (arg3_rtx))
3691 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3692 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3695 if (HAVE_cmpmemsi)
3696 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3697 GEN_INT (MIN (arg1_align, arg2_align)));
3698 else
3699 gcc_unreachable ();
3701 if (insn)
3702 emit_insn (insn);
3703 else
3704 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3705 TYPE_MODE (integer_type_node), 3,
3706 XEXP (arg1_rtx, 0), Pmode,
3707 XEXP (arg2_rtx, 0), Pmode,
3708 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3709 TYPE_UNSIGNED (sizetype)),
3710 TYPE_MODE (sizetype));
3712 /* Return the value in the proper mode for this function. */
3713 mode = TYPE_MODE (TREE_TYPE (exp));
3714 if (GET_MODE (result) == mode)
3715 return result;
3716 else if (target != 0)
3718 convert_move (target, result, 0);
3719 return target;
3721 else
3722 return convert_to_mode (mode, result, 0);
3724 #endif /* HAVE_cmpmemsi. */
3726 return NULL_RTX;
3729 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3730 if we failed the caller should emit a normal call, otherwise try to get
3731 the result in TARGET, if convenient. */
3733 static rtx
3734 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3736 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3737 return NULL_RTX;
3739 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3740 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3741 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3743 rtx arg1_rtx, arg2_rtx;
3744 rtx result, insn = NULL_RTX;
3745 tree fndecl, fn;
3746 tree arg1 = CALL_EXPR_ARG (exp, 0);
3747 tree arg2 = CALL_EXPR_ARG (exp, 1);
3749 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3750 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3752 /* If we don't have POINTER_TYPE, call the function. */
3753 if (arg1_align == 0 || arg2_align == 0)
3754 return NULL_RTX;
3756 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3757 arg1 = builtin_save_expr (arg1);
3758 arg2 = builtin_save_expr (arg2);
3760 arg1_rtx = get_memory_rtx (arg1, NULL);
3761 arg2_rtx = get_memory_rtx (arg2, NULL);
3763 #ifdef HAVE_cmpstrsi
3764 /* Try to call cmpstrsi. */
3765 if (HAVE_cmpstrsi)
3767 enum machine_mode insn_mode
3768 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3770 /* Make a place to write the result of the instruction. */
3771 result = target;
3772 if (! (result != 0
3773 && REG_P (result) && GET_MODE (result) == insn_mode
3774 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3775 result = gen_reg_rtx (insn_mode);
3777 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3778 GEN_INT (MIN (arg1_align, arg2_align)));
3780 #endif
3781 #ifdef HAVE_cmpstrnsi
3782 /* Try to determine at least one length and call cmpstrnsi. */
3783 if (!insn && HAVE_cmpstrnsi)
3785 tree len;
3786 rtx arg3_rtx;
3788 enum machine_mode insn_mode
3789 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3790 tree len1 = c_strlen (arg1, 1);
3791 tree len2 = c_strlen (arg2, 1);
3793 if (len1)
3794 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3795 if (len2)
3796 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3798 /* If we don't have a constant length for the first, use the length
3799 of the second, if we know it. We don't require a constant for
3800 this case; some cost analysis could be done if both are available
3801 but neither is constant. For now, assume they're equally cheap,
3802 unless one has side effects. If both strings have constant lengths,
3803 use the smaller. */
3805 if (!len1)
3806 len = len2;
3807 else if (!len2)
3808 len = len1;
3809 else if (TREE_SIDE_EFFECTS (len1))
3810 len = len2;
3811 else if (TREE_SIDE_EFFECTS (len2))
3812 len = len1;
3813 else if (TREE_CODE (len1) != INTEGER_CST)
3814 len = len2;
3815 else if (TREE_CODE (len2) != INTEGER_CST)
3816 len = len1;
3817 else if (tree_int_cst_lt (len1, len2))
3818 len = len1;
3819 else
3820 len = len2;
3822 /* If both arguments have side effects, we cannot optimize. */
3823 if (!len || TREE_SIDE_EFFECTS (len))
3824 goto do_libcall;
3826 arg3_rtx = expand_normal (len);
3828 /* Make a place to write the result of the instruction. */
3829 result = target;
3830 if (! (result != 0
3831 && REG_P (result) && GET_MODE (result) == insn_mode
3832 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3833 result = gen_reg_rtx (insn_mode);
3835 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3836 GEN_INT (MIN (arg1_align, arg2_align)));
3838 #endif
3840 if (insn)
3842 enum machine_mode mode;
3843 emit_insn (insn);
3845 /* Return the value in the proper mode for this function. */
3846 mode = TYPE_MODE (TREE_TYPE (exp));
3847 if (GET_MODE (result) == mode)
3848 return result;
3849 if (target == 0)
3850 return convert_to_mode (mode, result, 0);
3851 convert_move (target, result, 0);
3852 return target;
3855 /* Expand the library call ourselves using a stabilized argument
3856 list to avoid re-evaluating the function's arguments twice. */
3857 #ifdef HAVE_cmpstrnsi
3858 do_libcall:
3859 #endif
3860 fndecl = get_callee_fndecl (exp);
3861 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3862 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3863 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3864 return expand_call (fn, target, target == const0_rtx);
3866 #endif
3867 return NULL_RTX;
3870 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3871 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3872 the result in TARGET, if convenient. */
3874 static rtx
3875 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3876 ATTRIBUTE_UNUSED enum machine_mode mode)
3878 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3880 if (!validate_arglist (exp,
3881 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3882 return NULL_RTX;
3884 /* If c_strlen can determine an expression for one of the string
3885 lengths, and it doesn't have side effects, then emit cmpstrnsi
3886 using length MIN(strlen(string)+1, arg3). */
3887 #ifdef HAVE_cmpstrnsi
3888 if (HAVE_cmpstrnsi)
3890 tree len, len1, len2;
3891 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3892 rtx result, insn;
3893 tree fndecl, fn;
3894 tree arg1 = CALL_EXPR_ARG (exp, 0);
3895 tree arg2 = CALL_EXPR_ARG (exp, 1);
3896 tree arg3 = CALL_EXPR_ARG (exp, 2);
3898 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3899 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3900 enum machine_mode insn_mode
3901 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3903 len1 = c_strlen (arg1, 1);
3904 len2 = c_strlen (arg2, 1);
3906 if (len1)
3907 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3908 if (len2)
3909 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3911 /* If we don't have a constant length for the first, use the length
3912 of the second, if we know it. We don't require a constant for
3913 this case; some cost analysis could be done if both are available
3914 but neither is constant. For now, assume they're equally cheap,
3915 unless one has side effects. If both strings have constant lengths,
3916 use the smaller. */
3918 if (!len1)
3919 len = len2;
3920 else if (!len2)
3921 len = len1;
3922 else if (TREE_SIDE_EFFECTS (len1))
3923 len = len2;
3924 else if (TREE_SIDE_EFFECTS (len2))
3925 len = len1;
3926 else if (TREE_CODE (len1) != INTEGER_CST)
3927 len = len2;
3928 else if (TREE_CODE (len2) != INTEGER_CST)
3929 len = len1;
3930 else if (tree_int_cst_lt (len1, len2))
3931 len = len1;
3932 else
3933 len = len2;
3935 /* If both arguments have side effects, we cannot optimize. */
3936 if (!len || TREE_SIDE_EFFECTS (len))
3937 return NULL_RTX;
3939 /* The actual new length parameter is MIN(len,arg3). */
3940 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3941 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3943 /* If we don't have POINTER_TYPE, call the function. */
3944 if (arg1_align == 0 || arg2_align == 0)
3945 return NULL_RTX;
3947 /* Make a place to write the result of the instruction. */
3948 result = target;
3949 if (! (result != 0
3950 && REG_P (result) && GET_MODE (result) == insn_mode
3951 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3952 result = gen_reg_rtx (insn_mode);
3954 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3955 arg1 = builtin_save_expr (arg1);
3956 arg2 = builtin_save_expr (arg2);
3957 len = builtin_save_expr (len);
3959 arg1_rtx = get_memory_rtx (arg1, len);
3960 arg2_rtx = get_memory_rtx (arg2, len);
3961 arg3_rtx = expand_normal (len);
3962 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3963 GEN_INT (MIN (arg1_align, arg2_align)));
3964 if (insn)
3966 emit_insn (insn);
3968 /* Return the value in the proper mode for this function. */
3969 mode = TYPE_MODE (TREE_TYPE (exp));
3970 if (GET_MODE (result) == mode)
3971 return result;
3972 if (target == 0)
3973 return convert_to_mode (mode, result, 0);
3974 convert_move (target, result, 0);
3975 return target;
3978 /* Expand the library call ourselves using a stabilized argument
3979 list to avoid re-evaluating the function's arguments twice. */
3980 fndecl = get_callee_fndecl (exp);
3981 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3982 arg1, arg2, len);
3983 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3984 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3985 return expand_call (fn, target, target == const0_rtx);
3987 #endif
3988 return NULL_RTX;
3991 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3992 if that's convenient. */
3995 expand_builtin_saveregs (void)
3997 rtx val, seq;
3999 /* Don't do __builtin_saveregs more than once in a function.
4000 Save the result of the first call and reuse it. */
4001 if (saveregs_value != 0)
4002 return saveregs_value;
4004 /* When this function is called, it means that registers must be
4005 saved on entry to this function. So we migrate the call to the
4006 first insn of this function. */
4008 start_sequence ();
4010 /* Do whatever the machine needs done in this case. */
4011 val = targetm.calls.expand_builtin_saveregs ();
4013 seq = get_insns ();
4014 end_sequence ();
4016 saveregs_value = val;
4018 /* Put the insns after the NOTE that starts the function. If this
4019 is inside a start_sequence, make the outer-level insn chain current, so
4020 the code is placed at the start of the function. */
4021 push_topmost_sequence ();
4022 emit_insn_after (seq, entry_of_function ());
4023 pop_topmost_sequence ();
4025 return val;
4028 /* Expand a call to __builtin_next_arg. */
4030 static rtx
4031 expand_builtin_next_arg (void)
4033 /* Checking arguments is already done in fold_builtin_next_arg
4034 that must be called before this function. */
4035 return expand_binop (ptr_mode, add_optab,
4036 crtl->args.internal_arg_pointer,
4037 crtl->args.arg_offset_rtx,
4038 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4041 /* Make it easier for the backends by protecting the valist argument
4042 from multiple evaluations. */
4044 static tree
4045 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4047 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4049 /* The current way of determining the type of valist is completely
4050 bogus. We should have the information on the va builtin instead. */
4051 if (!vatype)
4052 vatype = targetm.fn_abi_va_list (cfun->decl);
4054 if (TREE_CODE (vatype) == ARRAY_TYPE)
4056 if (TREE_SIDE_EFFECTS (valist))
4057 valist = save_expr (valist);
4059 /* For this case, the backends will be expecting a pointer to
4060 vatype, but it's possible we've actually been given an array
4061 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4062 So fix it. */
4063 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4065 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4066 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4069 else
4071 tree pt = build_pointer_type (vatype);
4073 if (! needs_lvalue)
4075 if (! TREE_SIDE_EFFECTS (valist))
4076 return valist;
4078 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4079 TREE_SIDE_EFFECTS (valist) = 1;
4082 if (TREE_SIDE_EFFECTS (valist))
4083 valist = save_expr (valist);
4084 valist = fold_build2_loc (loc, MEM_REF,
4085 vatype, valist, build_int_cst (pt, 0));
4088 return valist;
4091 /* The "standard" definition of va_list is void*. */
4093 tree
4094 std_build_builtin_va_list (void)
4096 return ptr_type_node;
4099 /* The "standard" abi va_list is va_list_type_node. */
4101 tree
4102 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4104 return va_list_type_node;
4107 /* The "standard" type of va_list is va_list_type_node. */
4109 tree
4110 std_canonical_va_list_type (tree type)
4112 tree wtype, htype;
4114 if (INDIRECT_REF_P (type))
4115 type = TREE_TYPE (type);
4116 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4117 type = TREE_TYPE (type);
4118 wtype = va_list_type_node;
4119 htype = type;
4120 /* Treat structure va_list types. */
4121 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4122 htype = TREE_TYPE (htype);
4123 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4125 /* If va_list is an array type, the argument may have decayed
4126 to a pointer type, e.g. by being passed to another function.
4127 In that case, unwrap both types so that we can compare the
4128 underlying records. */
4129 if (TREE_CODE (htype) == ARRAY_TYPE
4130 || POINTER_TYPE_P (htype))
4132 wtype = TREE_TYPE (wtype);
4133 htype = TREE_TYPE (htype);
4136 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4137 return va_list_type_node;
4139 return NULL_TREE;
4142 /* The "standard" implementation of va_start: just assign `nextarg' to
4143 the variable. */
4145 void
4146 std_expand_builtin_va_start (tree valist, rtx nextarg)
4148 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4149 convert_move (va_r, nextarg, 0);
4152 /* Expand EXP, a call to __builtin_va_start. */
4154 static rtx
4155 expand_builtin_va_start (tree exp)
4157 rtx nextarg;
4158 tree valist;
4159 location_t loc = EXPR_LOCATION (exp);
4161 if (call_expr_nargs (exp) < 2)
4163 error_at (loc, "too few arguments to function %<va_start%>");
4164 return const0_rtx;
4167 if (fold_builtin_next_arg (exp, true))
4168 return const0_rtx;
4170 nextarg = expand_builtin_next_arg ();
4171 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4173 if (targetm.expand_builtin_va_start)
4174 targetm.expand_builtin_va_start (valist, nextarg);
4175 else
4176 std_expand_builtin_va_start (valist, nextarg);
4178 return const0_rtx;
4181 /* The "standard" implementation of va_arg: read the value from the
4182 current (padded) address and increment by the (padded) size. */
4184 tree
4185 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4186 gimple_seq *post_p)
4188 tree addr, t, type_size, rounded_size, valist_tmp;
4189 unsigned HOST_WIDE_INT align, boundary;
4190 bool indirect;
4192 #ifdef ARGS_GROW_DOWNWARD
4193 /* All of the alignment and movement below is for args-grow-up machines.
4194 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4195 implement their own specialized gimplify_va_arg_expr routines. */
4196 gcc_unreachable ();
4197 #endif
4199 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4200 if (indirect)
4201 type = build_pointer_type (type);
4203 align = PARM_BOUNDARY / BITS_PER_UNIT;
4204 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4206 /* When we align parameter on stack for caller, if the parameter
4207 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4208 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4209 here with caller. */
4210 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4211 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4213 boundary /= BITS_PER_UNIT;
4215 /* Hoist the valist value into a temporary for the moment. */
4216 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4218 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4219 requires greater alignment, we must perform dynamic alignment. */
4220 if (boundary > align
4221 && !integer_zerop (TYPE_SIZE (type)))
4223 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4224 fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
4225 gimplify_and_add (t, pre_p);
4227 t = fold_convert (sizetype, valist_tmp);
4228 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4229 fold_convert (TREE_TYPE (valist),
4230 fold_build2 (BIT_AND_EXPR, sizetype, t,
4231 size_int (-boundary))));
4232 gimplify_and_add (t, pre_p);
4234 else
4235 boundary = align;
4237 /* If the actual alignment is less than the alignment of the type,
4238 adjust the type accordingly so that we don't assume strict alignment
4239 when dereferencing the pointer. */
4240 boundary *= BITS_PER_UNIT;
4241 if (boundary < TYPE_ALIGN (type))
4243 type = build_variant_type_copy (type);
4244 TYPE_ALIGN (type) = boundary;
4247 /* Compute the rounded size of the type. */
4248 type_size = size_in_bytes (type);
4249 rounded_size = round_up (type_size, align);
4251 /* Reduce rounded_size so it's sharable with the postqueue. */
4252 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4254 /* Get AP. */
4255 addr = valist_tmp;
4256 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4258 /* Small args are padded downward. */
4259 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4260 rounded_size, size_int (align));
4261 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4262 size_binop (MINUS_EXPR, rounded_size, type_size));
4263 addr = fold_build_pointer_plus (addr, t);
4266 /* Compute new value for AP. */
4267 t = fold_build_pointer_plus (valist_tmp, rounded_size);
4268 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4269 gimplify_and_add (t, pre_p);
4271 addr = fold_convert (build_pointer_type (type), addr);
4273 if (indirect)
4274 addr = build_va_arg_indirect_ref (addr);
4276 return build_va_arg_indirect_ref (addr);
4279 /* Build an indirect-ref expression over the given TREE, which represents a
4280 piece of a va_arg() expansion. */
4281 tree
4282 build_va_arg_indirect_ref (tree addr)
4284 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4286 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4287 mf_mark (addr);
4289 return addr;
4292 /* Return a dummy expression of type TYPE in order to keep going after an
4293 error. */
4295 static tree
4296 dummy_object (tree type)
4298 tree t = build_int_cst (build_pointer_type (type), 0);
4299 return build2 (MEM_REF, type, t, t);
4302 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4303 builtin function, but a very special sort of operator. */
4305 enum gimplify_status
4306 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4308 tree promoted_type, have_va_type;
4309 tree valist = TREE_OPERAND (*expr_p, 0);
4310 tree type = TREE_TYPE (*expr_p);
4311 tree t;
4312 location_t loc = EXPR_LOCATION (*expr_p);
4314 /* Verify that valist is of the proper type. */
4315 have_va_type = TREE_TYPE (valist);
4316 if (have_va_type == error_mark_node)
4317 return GS_ERROR;
4318 have_va_type = targetm.canonical_va_list_type (have_va_type);
4320 if (have_va_type == NULL_TREE)
4322 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4323 return GS_ERROR;
4326 /* Generate a diagnostic for requesting data of a type that cannot
4327 be passed through `...' due to type promotion at the call site. */
4328 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4329 != type)
4331 static bool gave_help;
4332 bool warned;
4334 /* Unfortunately, this is merely undefined, rather than a constraint
4335 violation, so we cannot make this an error. If this call is never
4336 executed, the program is still strictly conforming. */
4337 warned = warning_at (loc, 0,
4338 "%qT is promoted to %qT when passed through %<...%>",
4339 type, promoted_type);
4340 if (!gave_help && warned)
4342 gave_help = true;
4343 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4344 promoted_type, type);
4347 /* We can, however, treat "undefined" any way we please.
4348 Call abort to encourage the user to fix the program. */
4349 if (warned)
4350 inform (loc, "if this code is reached, the program will abort");
4351 /* Before the abort, allow the evaluation of the va_list
4352 expression to exit or longjmp. */
4353 gimplify_and_add (valist, pre_p);
4354 t = build_call_expr_loc (loc,
4355 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4356 gimplify_and_add (t, pre_p);
4358 /* This is dead code, but go ahead and finish so that the
4359 mode of the result comes out right. */
4360 *expr_p = dummy_object (type);
4361 return GS_ALL_DONE;
4363 else
4365 /* Make it easier for the backends by protecting the valist argument
4366 from multiple evaluations. */
4367 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4369 /* For this case, the backends will be expecting a pointer to
4370 TREE_TYPE (abi), but it's possible we've
4371 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4372 So fix it. */
4373 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4375 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4376 valist = fold_convert_loc (loc, p1,
4377 build_fold_addr_expr_loc (loc, valist));
4380 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4382 else
4383 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4385 if (!targetm.gimplify_va_arg_expr)
4386 /* FIXME: Once most targets are converted we should merely
4387 assert this is non-null. */
4388 return GS_ALL_DONE;
4390 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4391 return GS_OK;
4395 /* Expand EXP, a call to __builtin_va_end. */
4397 static rtx
4398 expand_builtin_va_end (tree exp)
4400 tree valist = CALL_EXPR_ARG (exp, 0);
4402 /* Evaluate for side effects, if needed. I hate macros that don't
4403 do that. */
4404 if (TREE_SIDE_EFFECTS (valist))
4405 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4407 return const0_rtx;
4410 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4411 builtin rather than just as an assignment in stdarg.h because of the
4412 nastiness of array-type va_list types. */
4414 static rtx
4415 expand_builtin_va_copy (tree exp)
4417 tree dst, src, t;
4418 location_t loc = EXPR_LOCATION (exp);
4420 dst = CALL_EXPR_ARG (exp, 0);
4421 src = CALL_EXPR_ARG (exp, 1);
4423 dst = stabilize_va_list_loc (loc, dst, 1);
4424 src = stabilize_va_list_loc (loc, src, 0);
4426 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4428 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4430 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4431 TREE_SIDE_EFFECTS (t) = 1;
4432 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4434 else
4436 rtx dstb, srcb, size;
4438 /* Evaluate to pointers. */
4439 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4440 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4441 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4442 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4444 dstb = convert_memory_address (Pmode, dstb);
4445 srcb = convert_memory_address (Pmode, srcb);
4447 /* "Dereference" to BLKmode memories. */
4448 dstb = gen_rtx_MEM (BLKmode, dstb);
4449 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4450 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4451 srcb = gen_rtx_MEM (BLKmode, srcb);
4452 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4453 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4455 /* Copy. */
4456 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4459 return const0_rtx;
4462 /* Expand a call to one of the builtin functions __builtin_frame_address or
4463 __builtin_return_address. */
4465 static rtx
4466 expand_builtin_frame_address (tree fndecl, tree exp)
4468 /* The argument must be a nonnegative integer constant.
4469 It counts the number of frames to scan up the stack.
4470 The value is the return address saved in that frame. */
4471 if (call_expr_nargs (exp) == 0)
4472 /* Warning about missing arg was already issued. */
4473 return const0_rtx;
4474 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4476 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4477 error ("invalid argument to %<__builtin_frame_address%>");
4478 else
4479 error ("invalid argument to %<__builtin_return_address%>");
4480 return const0_rtx;
4482 else
4484 rtx tem
4485 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4486 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4488 /* Some ports cannot access arbitrary stack frames. */
4489 if (tem == NULL)
4491 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4492 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4493 else
4494 warning (0, "unsupported argument to %<__builtin_return_address%>");
4495 return const0_rtx;
4498 /* For __builtin_frame_address, return what we've got. */
4499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4500 return tem;
4502 if (!REG_P (tem)
4503 && ! CONSTANT_P (tem))
4504 tem = copy_to_mode_reg (Pmode, tem);
4505 return tem;
4509 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4510 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4511 is the same as for allocate_dynamic_stack_space. */
4513 static rtx
4514 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4516 rtx op0;
4517 rtx result;
4519 /* Emit normal call if marked not-inlineable. */
4520 if (CALL_CANNOT_INLINE_P (exp))
4521 return NULL_RTX;
4523 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4524 return NULL_RTX;
4526 /* Compute the argument. */
4527 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4529 /* Allocate the desired space. */
4530 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4531 cannot_accumulate);
4532 result = convert_memory_address (ptr_mode, result);
4534 return result;
4537 /* Expand a call to a bswap builtin with argument ARG0. MODE
4538 is the mode to expand with. */
4540 static rtx
4541 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4543 enum machine_mode mode;
4544 tree arg;
4545 rtx op0;
4547 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4548 return NULL_RTX;
4550 arg = CALL_EXPR_ARG (exp, 0);
4551 mode = TYPE_MODE (TREE_TYPE (arg));
4552 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4554 target = expand_unop (mode, bswap_optab, op0, target, 1);
4556 gcc_assert (target);
4558 return convert_to_mode (mode, target, 0);
4561 /* Expand a call to a unary builtin in EXP.
4562 Return NULL_RTX if a normal call should be emitted rather than expanding the
4563 function in-line. If convenient, the result should be placed in TARGET.
4564 SUBTARGET may be used as the target for computing one of EXP's operands. */
4566 static rtx
4567 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4568 rtx subtarget, optab op_optab)
4570 rtx op0;
4572 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4573 return NULL_RTX;
4575 /* Compute the argument. */
4576 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4577 (subtarget
4578 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4579 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4580 VOIDmode, EXPAND_NORMAL);
4581 /* Compute op, into TARGET if possible.
4582 Set TARGET to wherever the result comes back. */
4583 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4584 op_optab, op0, target, op_optab != clrsb_optab);
4585 gcc_assert (target);
4587 return convert_to_mode (target_mode, target, 0);
4590 /* Expand a call to __builtin_expect. We just return our argument
4591 as the builtin_expect semantic should've been already executed by
4592 tree branch prediction pass. */
4594 static rtx
4595 expand_builtin_expect (tree exp, rtx target)
4597 tree arg;
4599 if (call_expr_nargs (exp) < 2)
4600 return const0_rtx;
4601 arg = CALL_EXPR_ARG (exp, 0);
4603 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4604 /* When guessing was done, the hints should be already stripped away. */
4605 gcc_assert (!flag_guess_branch_prob
4606 || optimize == 0 || seen_error ());
4607 return target;
4610 /* Expand a call to __builtin_assume_aligned. We just return our first
4611 argument as the builtin_assume_aligned semantic should've been already
4612 executed by CCP. */
4614 static rtx
4615 expand_builtin_assume_aligned (tree exp, rtx target)
4617 if (call_expr_nargs (exp) < 2)
4618 return const0_rtx;
4619 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4620 EXPAND_NORMAL);
4621 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4622 && (call_expr_nargs (exp) < 3
4623 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4624 return target;
4627 void
4628 expand_builtin_trap (void)
4630 #ifdef HAVE_trap
4631 if (HAVE_trap)
4632 emit_insn (gen_trap ());
4633 else
4634 #endif
4635 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4636 emit_barrier ();
4639 /* Expand a call to __builtin_unreachable. We do nothing except emit
4640 a barrier saying that control flow will not pass here.
4642 It is the responsibility of the program being compiled to ensure
4643 that control flow does never reach __builtin_unreachable. */
4644 static void
4645 expand_builtin_unreachable (void)
4647 emit_barrier ();
4650 /* Expand EXP, a call to fabs, fabsf or fabsl.
4651 Return NULL_RTX if a normal call should be emitted rather than expanding
4652 the function inline. If convenient, the result should be placed
4653 in TARGET. SUBTARGET may be used as the target for computing
4654 the operand. */
4656 static rtx
4657 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4659 enum machine_mode mode;
4660 tree arg;
4661 rtx op0;
4663 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4664 return NULL_RTX;
4666 arg = CALL_EXPR_ARG (exp, 0);
4667 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4668 mode = TYPE_MODE (TREE_TYPE (arg));
4669 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4670 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4673 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4674 Return NULL is a normal call should be emitted rather than expanding the
4675 function inline. If convenient, the result should be placed in TARGET.
4676 SUBTARGET may be used as the target for computing the operand. */
4678 static rtx
4679 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4681 rtx op0, op1;
4682 tree arg;
4684 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4685 return NULL_RTX;
4687 arg = CALL_EXPR_ARG (exp, 0);
4688 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4690 arg = CALL_EXPR_ARG (exp, 1);
4691 op1 = expand_normal (arg);
4693 return expand_copysign (op0, op1, target);
4696 /* Create a new constant string literal and return a char* pointer to it.
4697 The STRING_CST value is the LEN characters at STR. */
4698 tree
4699 build_string_literal (int len, const char *str)
4701 tree t, elem, index, type;
4703 t = build_string (len, str);
4704 elem = build_type_variant (char_type_node, 1, 0);
4705 index = build_index_type (size_int (len - 1));
4706 type = build_array_type (elem, index);
4707 TREE_TYPE (t) = type;
4708 TREE_CONSTANT (t) = 1;
4709 TREE_READONLY (t) = 1;
4710 TREE_STATIC (t) = 1;
4712 type = build_pointer_type (elem);
4713 t = build1 (ADDR_EXPR, type,
4714 build4 (ARRAY_REF, elem,
4715 t, integer_zero_node, NULL_TREE, NULL_TREE));
4716 return t;
4719 /* Expand a call to __builtin___clear_cache. */
4721 static rtx
4722 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4724 #ifndef HAVE_clear_cache
4725 #ifdef CLEAR_INSN_CACHE
4726 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4727 does something. Just do the default expansion to a call to
4728 __clear_cache(). */
4729 return NULL_RTX;
4730 #else
4731 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4732 does nothing. There is no need to call it. Do nothing. */
4733 return const0_rtx;
4734 #endif /* CLEAR_INSN_CACHE */
4735 #else
4736 /* We have a "clear_cache" insn, and it will handle everything. */
4737 tree begin, end;
4738 rtx begin_rtx, end_rtx;
4740 /* We must not expand to a library call. If we did, any
4741 fallback library function in libgcc that might contain a call to
4742 __builtin___clear_cache() would recurse infinitely. */
4743 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4745 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4746 return const0_rtx;
4749 if (HAVE_clear_cache)
4751 struct expand_operand ops[2];
4753 begin = CALL_EXPR_ARG (exp, 0);
4754 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4756 end = CALL_EXPR_ARG (exp, 1);
4757 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4759 create_address_operand (&ops[0], begin_rtx);
4760 create_address_operand (&ops[1], end_rtx);
4761 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4762 return const0_rtx;
4764 return const0_rtx;
4765 #endif /* HAVE_clear_cache */
4768 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4770 static rtx
4771 round_trampoline_addr (rtx tramp)
4773 rtx temp, addend, mask;
4775 /* If we don't need too much alignment, we'll have been guaranteed
4776 proper alignment by get_trampoline_type. */
4777 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4778 return tramp;
4780 /* Round address up to desired boundary. */
4781 temp = gen_reg_rtx (Pmode);
4782 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4783 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4785 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4786 temp, 0, OPTAB_LIB_WIDEN);
4787 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4788 temp, 0, OPTAB_LIB_WIDEN);
4790 return tramp;
4793 static rtx
4794 expand_builtin_init_trampoline (tree exp)
4796 tree t_tramp, t_func, t_chain;
4797 rtx m_tramp, r_tramp, r_chain, tmp;
4799 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4800 POINTER_TYPE, VOID_TYPE))
4801 return NULL_RTX;
4803 t_tramp = CALL_EXPR_ARG (exp, 0);
4804 t_func = CALL_EXPR_ARG (exp, 1);
4805 t_chain = CALL_EXPR_ARG (exp, 2);
4807 r_tramp = expand_normal (t_tramp);
4808 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4809 MEM_NOTRAP_P (m_tramp) = 1;
4811 /* The TRAMP argument should be the address of a field within the
4812 local function's FRAME decl. Let's see if we can fill in the
4813 to fill in the MEM_ATTRs for this memory. */
4814 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4815 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4816 true, 0);
4818 tmp = round_trampoline_addr (r_tramp);
4819 if (tmp != r_tramp)
4821 m_tramp = change_address (m_tramp, BLKmode, tmp);
4822 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4823 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4826 /* The FUNC argument should be the address of the nested function.
4827 Extract the actual function decl to pass to the hook. */
4828 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4829 t_func = TREE_OPERAND (t_func, 0);
4830 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4832 r_chain = expand_normal (t_chain);
4834 /* Generate insns to initialize the trampoline. */
4835 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4837 trampolines_created = 1;
4839 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4840 "trampoline generated for nested function %qD", t_func);
4842 return const0_rtx;
4845 static rtx
4846 expand_builtin_adjust_trampoline (tree exp)
4848 rtx tramp;
4850 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4851 return NULL_RTX;
4853 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4854 tramp = round_trampoline_addr (tramp);
4855 if (targetm.calls.trampoline_adjust_address)
4856 tramp = targetm.calls.trampoline_adjust_address (tramp);
4858 return tramp;
4861 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4862 function. The function first checks whether the back end provides
4863 an insn to implement signbit for the respective mode. If not, it
4864 checks whether the floating point format of the value is such that
4865 the sign bit can be extracted. If that is not the case, the
4866 function returns NULL_RTX to indicate that a normal call should be
4867 emitted rather than expanding the function in-line. EXP is the
4868 expression that is a call to the builtin function; if convenient,
4869 the result should be placed in TARGET. */
4870 static rtx
4871 expand_builtin_signbit (tree exp, rtx target)
4873 const struct real_format *fmt;
4874 enum machine_mode fmode, imode, rmode;
4875 tree arg;
4876 int word, bitpos;
4877 enum insn_code icode;
4878 rtx temp;
4879 location_t loc = EXPR_LOCATION (exp);
4881 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4882 return NULL_RTX;
4884 arg = CALL_EXPR_ARG (exp, 0);
4885 fmode = TYPE_MODE (TREE_TYPE (arg));
4886 rmode = TYPE_MODE (TREE_TYPE (exp));
4887 fmt = REAL_MODE_FORMAT (fmode);
4889 arg = builtin_save_expr (arg);
4891 /* Expand the argument yielding a RTX expression. */
4892 temp = expand_normal (arg);
4894 /* Check if the back end provides an insn that handles signbit for the
4895 argument's mode. */
4896 icode = optab_handler (signbit_optab, fmode);
4897 if (icode != CODE_FOR_nothing)
4899 rtx last = get_last_insn ();
4900 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4901 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4902 return target;
4903 delete_insns_since (last);
4906 /* For floating point formats without a sign bit, implement signbit
4907 as "ARG < 0.0". */
4908 bitpos = fmt->signbit_ro;
4909 if (bitpos < 0)
4911 /* But we can't do this if the format supports signed zero. */
4912 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4913 return NULL_RTX;
4915 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4916 build_real (TREE_TYPE (arg), dconst0));
4917 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4920 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4922 imode = int_mode_for_mode (fmode);
4923 if (imode == BLKmode)
4924 return NULL_RTX;
4925 temp = gen_lowpart (imode, temp);
4927 else
4929 imode = word_mode;
4930 /* Handle targets with different FP word orders. */
4931 if (FLOAT_WORDS_BIG_ENDIAN)
4932 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4933 else
4934 word = bitpos / BITS_PER_WORD;
4935 temp = operand_subword_force (temp, word, fmode);
4936 bitpos = bitpos % BITS_PER_WORD;
4939 /* Force the intermediate word_mode (or narrower) result into a
4940 register. This avoids attempting to create paradoxical SUBREGs
4941 of floating point modes below. */
4942 temp = force_reg (imode, temp);
4944 /* If the bitpos is within the "result mode" lowpart, the operation
4945 can be implement with a single bitwise AND. Otherwise, we need
4946 a right shift and an AND. */
4948 if (bitpos < GET_MODE_BITSIZE (rmode))
4950 double_int mask = double_int_setbit (double_int_zero, bitpos);
4952 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4953 temp = gen_lowpart (rmode, temp);
4954 temp = expand_binop (rmode, and_optab, temp,
4955 immed_double_int_const (mask, rmode),
4956 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4958 else
4960 /* Perform a logical right shift to place the signbit in the least
4961 significant bit, then truncate the result to the desired mode
4962 and mask just this bit. */
4963 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4964 temp = gen_lowpart (rmode, temp);
4965 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4966 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4969 return temp;
4972 /* Expand fork or exec calls. TARGET is the desired target of the
4973 call. EXP is the call. FN is the
4974 identificator of the actual function. IGNORE is nonzero if the
4975 value is to be ignored. */
4977 static rtx
4978 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4980 tree id, decl;
4981 tree call;
4983 /* If we are not profiling, just call the function. */
4984 if (!profile_arc_flag)
4985 return NULL_RTX;
4987 /* Otherwise call the wrapper. This should be equivalent for the rest of
4988 compiler, so the code does not diverge, and the wrapper may run the
4989 code necessary for keeping the profiling sane. */
4991 switch (DECL_FUNCTION_CODE (fn))
4993 case BUILT_IN_FORK:
4994 id = get_identifier ("__gcov_fork");
4995 break;
4997 case BUILT_IN_EXECL:
4998 id = get_identifier ("__gcov_execl");
4999 break;
5001 case BUILT_IN_EXECV:
5002 id = get_identifier ("__gcov_execv");
5003 break;
5005 case BUILT_IN_EXECLP:
5006 id = get_identifier ("__gcov_execlp");
5007 break;
5009 case BUILT_IN_EXECLE:
5010 id = get_identifier ("__gcov_execle");
5011 break;
5013 case BUILT_IN_EXECVP:
5014 id = get_identifier ("__gcov_execvp");
5015 break;
5017 case BUILT_IN_EXECVE:
5018 id = get_identifier ("__gcov_execve");
5019 break;
5021 default:
5022 gcc_unreachable ();
5025 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5026 FUNCTION_DECL, id, TREE_TYPE (fn));
5027 DECL_EXTERNAL (decl) = 1;
5028 TREE_PUBLIC (decl) = 1;
5029 DECL_ARTIFICIAL (decl) = 1;
5030 TREE_NOTHROW (decl) = 1;
5031 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5032 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5033 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5034 return expand_call (call, target, ignore);
5039 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5040 the pointer in these functions is void*, the tree optimizers may remove
5041 casts. The mode computed in expand_builtin isn't reliable either, due
5042 to __sync_bool_compare_and_swap.
5044 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5045 group of builtins. This gives us log2 of the mode size. */
5047 static inline enum machine_mode
5048 get_builtin_sync_mode (int fcode_diff)
5050 /* The size is not negotiable, so ask not to get BLKmode in return
5051 if the target indicates that a smaller size would be better. */
5052 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5055 /* Expand the memory expression LOC and return the appropriate memory operand
5056 for the builtin_sync operations. */
5058 static rtx
5059 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5061 rtx addr, mem;
5063 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5064 addr = convert_memory_address (Pmode, addr);
5066 /* Note that we explicitly do not want any alias information for this
5067 memory, so that we kill all other live memories. Otherwise we don't
5068 satisfy the full barrier semantics of the intrinsic. */
5069 mem = validize_mem (gen_rtx_MEM (mode, addr));
5071 /* The alignment needs to be at least according to that of the mode. */
5072 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5073 get_pointer_alignment (loc)));
5074 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5075 MEM_VOLATILE_P (mem) = 1;
5077 return mem;
5080 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5081 EXP is the CALL_EXPR. CODE is the rtx code
5082 that corresponds to the arithmetic or logical operation from the name;
5083 an exception here is that NOT actually means NAND. TARGET is an optional
5084 place for us to store the results; AFTER is true if this is the
5085 fetch_and_xxx form. IGNORE is true if we don't actually care about
5086 the result of the operation at all. */
5088 static rtx
5089 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5090 enum rtx_code code, bool after,
5091 rtx target, bool ignore)
5093 rtx val, mem;
5094 enum machine_mode old_mode;
5095 location_t loc = EXPR_LOCATION (exp);
5097 if (code == NOT && warn_sync_nand)
5099 tree fndecl = get_callee_fndecl (exp);
5100 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5102 static bool warned_f_a_n, warned_n_a_f;
5104 switch (fcode)
5106 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5107 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5108 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5109 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5110 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5112 if (warned_f_a_n)
5113 break;
5115 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_FETCH_AND_NAND_N];
5116 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5117 warned_f_a_n = true;
5118 break;
5120 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5121 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5122 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5123 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5124 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5126 if (warned_n_a_f)
5127 break;
5129 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_NAND_AND_FETCH_N];
5130 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5131 warned_n_a_f = true;
5132 break;
5134 default:
5135 gcc_unreachable ();
5139 /* Expand the operands. */
5140 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5142 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5143 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5144 of CONST_INTs, where we know the old_mode only from the call argument. */
5145 old_mode = GET_MODE (val);
5146 if (old_mode == VOIDmode)
5147 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5148 val = convert_modes (mode, old_mode, val, 1);
5150 if (ignore)
5151 return expand_sync_operation (mem, val, code);
5152 else
5153 return expand_sync_fetch_operation (mem, val, code, after, target);
5156 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5157 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5158 true if this is the boolean form. TARGET is a place for us to store the
5159 results; this is NOT optional if IS_BOOL is true. */
5161 static rtx
5162 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5163 bool is_bool, rtx target)
5165 rtx old_val, new_val, mem;
5166 enum machine_mode old_mode;
5168 /* Expand the operands. */
5169 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5172 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5173 mode, EXPAND_NORMAL);
5174 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5175 of CONST_INTs, where we know the old_mode only from the call argument. */
5176 old_mode = GET_MODE (old_val);
5177 if (old_mode == VOIDmode)
5178 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5179 old_val = convert_modes (mode, old_mode, old_val, 1);
5181 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5182 mode, EXPAND_NORMAL);
5183 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5184 of CONST_INTs, where we know the old_mode only from the call argument. */
5185 old_mode = GET_MODE (new_val);
5186 if (old_mode == VOIDmode)
5187 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5188 new_val = convert_modes (mode, old_mode, new_val, 1);
5190 if (is_bool)
5191 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5192 else
5193 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5196 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5197 general form is actually an atomic exchange, and some targets only
5198 support a reduced form with the second argument being a constant 1.
5199 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5200 the results. */
5202 static rtx
5203 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5204 rtx target)
5206 rtx val, mem;
5207 enum machine_mode old_mode;
5209 /* Expand the operands. */
5210 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5211 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5212 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5213 of CONST_INTs, where we know the old_mode only from the call argument. */
5214 old_mode = GET_MODE (val);
5215 if (old_mode == VOIDmode)
5216 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5217 val = convert_modes (mode, old_mode, val, 1);
5219 return expand_sync_lock_test_and_set (mem, val, target);
5222 /* Expand the __sync_synchronize intrinsic. */
5224 static void
5225 expand_builtin_sync_synchronize (void)
5227 gimple x;
5228 VEC (tree, gc) *v_clobbers;
5230 #ifdef HAVE_memory_barrier
5231 if (HAVE_memory_barrier)
5233 emit_insn (gen_memory_barrier ());
5234 return;
5236 #endif
5238 if (synchronize_libfunc != NULL_RTX)
5240 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5241 return;
5244 /* If no explicit memory barrier instruction is available, create an
5245 empty asm stmt with a memory clobber. */
5246 v_clobbers = VEC_alloc (tree, gc, 1);
5247 VEC_quick_push (tree, v_clobbers,
5248 tree_cons (NULL, build_string (6, "memory"), NULL));
5249 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5250 gimple_asm_set_volatile (x, true);
5251 expand_asm_stmt (x);
5254 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5256 static void
5257 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5259 struct expand_operand ops[2];
5260 enum insn_code icode;
5261 rtx mem;
5263 /* Expand the operands. */
5264 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5266 /* If there is an explicit operation in the md file, use it. */
5267 icode = direct_optab_handler (sync_lock_release_optab, mode);
5268 if (icode != CODE_FOR_nothing)
5270 create_fixed_operand (&ops[0], mem);
5271 create_input_operand (&ops[1], const0_rtx, mode);
5272 if (maybe_expand_insn (icode, 2, ops))
5273 return;
5276 /* Otherwise we can implement this operation by emitting a barrier
5277 followed by a store of zero. */
5278 expand_builtin_sync_synchronize ();
5279 emit_move_insn (mem, const0_rtx);
5282 /* Expand an expression EXP that calls a built-in function,
5283 with result going to TARGET if that's convenient
5284 (and in mode MODE if that's convenient).
5285 SUBTARGET may be used as the target for computing one of EXP's operands.
5286 IGNORE is nonzero if the value is to be ignored. */
5289 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5290 int ignore)
5292 tree fndecl = get_callee_fndecl (exp);
5293 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5294 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5295 int flags;
5297 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5298 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5300 /* When not optimizing, generate calls to library functions for a certain
5301 set of builtins. */
5302 if (!optimize
5303 && !called_as_built_in (fndecl)
5304 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5305 && fcode != BUILT_IN_ALLOCA
5306 && fcode != BUILT_IN_FREE)
5307 return expand_call (exp, target, ignore);
5309 /* The built-in function expanders test for target == const0_rtx
5310 to determine whether the function's result will be ignored. */
5311 if (ignore)
5312 target = const0_rtx;
5314 /* If the result of a pure or const built-in function is ignored, and
5315 none of its arguments are volatile, we can avoid expanding the
5316 built-in call and just evaluate the arguments for side-effects. */
5317 if (target == const0_rtx
5318 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5319 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5321 bool volatilep = false;
5322 tree arg;
5323 call_expr_arg_iterator iter;
5325 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5326 if (TREE_THIS_VOLATILE (arg))
5328 volatilep = true;
5329 break;
5332 if (! volatilep)
5334 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5335 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5336 return const0_rtx;
5340 switch (fcode)
5342 CASE_FLT_FN (BUILT_IN_FABS):
5343 target = expand_builtin_fabs (exp, target, subtarget);
5344 if (target)
5345 return target;
5346 break;
5348 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5349 target = expand_builtin_copysign (exp, target, subtarget);
5350 if (target)
5351 return target;
5352 break;
5354 /* Just do a normal library call if we were unable to fold
5355 the values. */
5356 CASE_FLT_FN (BUILT_IN_CABS):
5357 break;
5359 CASE_FLT_FN (BUILT_IN_EXP):
5360 CASE_FLT_FN (BUILT_IN_EXP10):
5361 CASE_FLT_FN (BUILT_IN_POW10):
5362 CASE_FLT_FN (BUILT_IN_EXP2):
5363 CASE_FLT_FN (BUILT_IN_EXPM1):
5364 CASE_FLT_FN (BUILT_IN_LOGB):
5365 CASE_FLT_FN (BUILT_IN_LOG):
5366 CASE_FLT_FN (BUILT_IN_LOG10):
5367 CASE_FLT_FN (BUILT_IN_LOG2):
5368 CASE_FLT_FN (BUILT_IN_LOG1P):
5369 CASE_FLT_FN (BUILT_IN_TAN):
5370 CASE_FLT_FN (BUILT_IN_ASIN):
5371 CASE_FLT_FN (BUILT_IN_ACOS):
5372 CASE_FLT_FN (BUILT_IN_ATAN):
5373 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5374 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5375 because of possible accuracy problems. */
5376 if (! flag_unsafe_math_optimizations)
5377 break;
5378 CASE_FLT_FN (BUILT_IN_SQRT):
5379 CASE_FLT_FN (BUILT_IN_FLOOR):
5380 CASE_FLT_FN (BUILT_IN_CEIL):
5381 CASE_FLT_FN (BUILT_IN_TRUNC):
5382 CASE_FLT_FN (BUILT_IN_ROUND):
5383 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5384 CASE_FLT_FN (BUILT_IN_RINT):
5385 target = expand_builtin_mathfn (exp, target, subtarget);
5386 if (target)
5387 return target;
5388 break;
5390 CASE_FLT_FN (BUILT_IN_FMA):
5391 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5392 if (target)
5393 return target;
5394 break;
5396 CASE_FLT_FN (BUILT_IN_ILOGB):
5397 if (! flag_unsafe_math_optimizations)
5398 break;
5399 CASE_FLT_FN (BUILT_IN_ISINF):
5400 CASE_FLT_FN (BUILT_IN_FINITE):
5401 case BUILT_IN_ISFINITE:
5402 case BUILT_IN_ISNORMAL:
5403 target = expand_builtin_interclass_mathfn (exp, target);
5404 if (target)
5405 return target;
5406 break;
5408 CASE_FLT_FN (BUILT_IN_ICEIL):
5409 CASE_FLT_FN (BUILT_IN_LCEIL):
5410 CASE_FLT_FN (BUILT_IN_LLCEIL):
5411 CASE_FLT_FN (BUILT_IN_LFLOOR):
5412 CASE_FLT_FN (BUILT_IN_IFLOOR):
5413 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5414 target = expand_builtin_int_roundingfn (exp, target);
5415 if (target)
5416 return target;
5417 break;
5419 CASE_FLT_FN (BUILT_IN_IRINT):
5420 CASE_FLT_FN (BUILT_IN_LRINT):
5421 CASE_FLT_FN (BUILT_IN_LLRINT):
5422 CASE_FLT_FN (BUILT_IN_IROUND):
5423 CASE_FLT_FN (BUILT_IN_LROUND):
5424 CASE_FLT_FN (BUILT_IN_LLROUND):
5425 target = expand_builtin_int_roundingfn_2 (exp, target);
5426 if (target)
5427 return target;
5428 break;
5430 CASE_FLT_FN (BUILT_IN_POWI):
5431 target = expand_builtin_powi (exp, target);
5432 if (target)
5433 return target;
5434 break;
5436 CASE_FLT_FN (BUILT_IN_ATAN2):
5437 CASE_FLT_FN (BUILT_IN_LDEXP):
5438 CASE_FLT_FN (BUILT_IN_SCALB):
5439 CASE_FLT_FN (BUILT_IN_SCALBN):
5440 CASE_FLT_FN (BUILT_IN_SCALBLN):
5441 if (! flag_unsafe_math_optimizations)
5442 break;
5444 CASE_FLT_FN (BUILT_IN_FMOD):
5445 CASE_FLT_FN (BUILT_IN_REMAINDER):
5446 CASE_FLT_FN (BUILT_IN_DREM):
5447 CASE_FLT_FN (BUILT_IN_POW):
5448 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5449 if (target)
5450 return target;
5451 break;
5453 CASE_FLT_FN (BUILT_IN_CEXPI):
5454 target = expand_builtin_cexpi (exp, target);
5455 gcc_assert (target);
5456 return target;
5458 CASE_FLT_FN (BUILT_IN_SIN):
5459 CASE_FLT_FN (BUILT_IN_COS):
5460 if (! flag_unsafe_math_optimizations)
5461 break;
5462 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5463 if (target)
5464 return target;
5465 break;
5467 CASE_FLT_FN (BUILT_IN_SINCOS):
5468 if (! flag_unsafe_math_optimizations)
5469 break;
5470 target = expand_builtin_sincos (exp);
5471 if (target)
5472 return target;
5473 break;
5475 case BUILT_IN_APPLY_ARGS:
5476 return expand_builtin_apply_args ();
5478 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5479 FUNCTION with a copy of the parameters described by
5480 ARGUMENTS, and ARGSIZE. It returns a block of memory
5481 allocated on the stack into which is stored all the registers
5482 that might possibly be used for returning the result of a
5483 function. ARGUMENTS is the value returned by
5484 __builtin_apply_args. ARGSIZE is the number of bytes of
5485 arguments that must be copied. ??? How should this value be
5486 computed? We'll also need a safe worst case value for varargs
5487 functions. */
5488 case BUILT_IN_APPLY:
5489 if (!validate_arglist (exp, POINTER_TYPE,
5490 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5491 && !validate_arglist (exp, REFERENCE_TYPE,
5492 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5493 return const0_rtx;
5494 else
5496 rtx ops[3];
5498 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5499 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5500 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5502 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5505 /* __builtin_return (RESULT) causes the function to return the
5506 value described by RESULT. RESULT is address of the block of
5507 memory returned by __builtin_apply. */
5508 case BUILT_IN_RETURN:
5509 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5510 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5511 return const0_rtx;
5513 case BUILT_IN_SAVEREGS:
5514 return expand_builtin_saveregs ();
5516 case BUILT_IN_VA_ARG_PACK:
5517 /* All valid uses of __builtin_va_arg_pack () are removed during
5518 inlining. */
5519 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5520 return const0_rtx;
5522 case BUILT_IN_VA_ARG_PACK_LEN:
5523 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5524 inlining. */
5525 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5526 return const0_rtx;
5528 /* Return the address of the first anonymous stack arg. */
5529 case BUILT_IN_NEXT_ARG:
5530 if (fold_builtin_next_arg (exp, false))
5531 return const0_rtx;
5532 return expand_builtin_next_arg ();
5534 case BUILT_IN_CLEAR_CACHE:
5535 target = expand_builtin___clear_cache (exp);
5536 if (target)
5537 return target;
5538 break;
5540 case BUILT_IN_CLASSIFY_TYPE:
5541 return expand_builtin_classify_type (exp);
5543 case BUILT_IN_CONSTANT_P:
5544 return const0_rtx;
5546 case BUILT_IN_FRAME_ADDRESS:
5547 case BUILT_IN_RETURN_ADDRESS:
5548 return expand_builtin_frame_address (fndecl, exp);
5550 /* Returns the address of the area where the structure is returned.
5551 0 otherwise. */
5552 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5553 if (call_expr_nargs (exp) != 0
5554 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5555 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5556 return const0_rtx;
5557 else
5558 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5560 case BUILT_IN_ALLOCA:
5561 /* If the allocation stems from the declaration of a variable-sized
5562 object, it cannot accumulate. */
5563 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5564 if (target)
5565 return target;
5566 break;
5568 case BUILT_IN_STACK_SAVE:
5569 return expand_stack_save ();
5571 case BUILT_IN_STACK_RESTORE:
5572 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5573 return const0_rtx;
5575 case BUILT_IN_BSWAP32:
5576 case BUILT_IN_BSWAP64:
5577 target = expand_builtin_bswap (exp, target, subtarget);
5579 if (target)
5580 return target;
5581 break;
5583 CASE_INT_FN (BUILT_IN_FFS):
5584 case BUILT_IN_FFSIMAX:
5585 target = expand_builtin_unop (target_mode, exp, target,
5586 subtarget, ffs_optab);
5587 if (target)
5588 return target;
5589 break;
5591 CASE_INT_FN (BUILT_IN_CLZ):
5592 case BUILT_IN_CLZIMAX:
5593 target = expand_builtin_unop (target_mode, exp, target,
5594 subtarget, clz_optab);
5595 if (target)
5596 return target;
5597 break;
5599 CASE_INT_FN (BUILT_IN_CTZ):
5600 case BUILT_IN_CTZIMAX:
5601 target = expand_builtin_unop (target_mode, exp, target,
5602 subtarget, ctz_optab);
5603 if (target)
5604 return target;
5605 break;
5607 CASE_INT_FN (BUILT_IN_CLRSB):
5608 case BUILT_IN_CLRSBIMAX:
5609 target = expand_builtin_unop (target_mode, exp, target,
5610 subtarget, clrsb_optab);
5611 if (target)
5612 return target;
5613 break;
5615 CASE_INT_FN (BUILT_IN_POPCOUNT):
5616 case BUILT_IN_POPCOUNTIMAX:
5617 target = expand_builtin_unop (target_mode, exp, target,
5618 subtarget, popcount_optab);
5619 if (target)
5620 return target;
5621 break;
5623 CASE_INT_FN (BUILT_IN_PARITY):
5624 case BUILT_IN_PARITYIMAX:
5625 target = expand_builtin_unop (target_mode, exp, target,
5626 subtarget, parity_optab);
5627 if (target)
5628 return target;
5629 break;
5631 case BUILT_IN_STRLEN:
5632 target = expand_builtin_strlen (exp, target, target_mode);
5633 if (target)
5634 return target;
5635 break;
5637 case BUILT_IN_STRCPY:
5638 target = expand_builtin_strcpy (exp, target);
5639 if (target)
5640 return target;
5641 break;
5643 case BUILT_IN_STRNCPY:
5644 target = expand_builtin_strncpy (exp, target);
5645 if (target)
5646 return target;
5647 break;
5649 case BUILT_IN_STPCPY:
5650 target = expand_builtin_stpcpy (exp, target, mode);
5651 if (target)
5652 return target;
5653 break;
5655 case BUILT_IN_MEMCPY:
5656 target = expand_builtin_memcpy (exp, target);
5657 if (target)
5658 return target;
5659 break;
5661 case BUILT_IN_MEMPCPY:
5662 target = expand_builtin_mempcpy (exp, target, mode);
5663 if (target)
5664 return target;
5665 break;
5667 case BUILT_IN_MEMSET:
5668 target = expand_builtin_memset (exp, target, mode);
5669 if (target)
5670 return target;
5671 break;
5673 case BUILT_IN_BZERO:
5674 target = expand_builtin_bzero (exp);
5675 if (target)
5676 return target;
5677 break;
5679 case BUILT_IN_STRCMP:
5680 target = expand_builtin_strcmp (exp, target);
5681 if (target)
5682 return target;
5683 break;
5685 case BUILT_IN_STRNCMP:
5686 target = expand_builtin_strncmp (exp, target, mode);
5687 if (target)
5688 return target;
5689 break;
5691 case BUILT_IN_BCMP:
5692 case BUILT_IN_MEMCMP:
5693 target = expand_builtin_memcmp (exp, target, mode);
5694 if (target)
5695 return target;
5696 break;
5698 case BUILT_IN_SETJMP:
5699 /* This should have been lowered to the builtins below. */
5700 gcc_unreachable ();
5702 case BUILT_IN_SETJMP_SETUP:
5703 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5704 and the receiver label. */
5705 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5707 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5708 VOIDmode, EXPAND_NORMAL);
5709 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5710 rtx label_r = label_rtx (label);
5712 /* This is copied from the handling of non-local gotos. */
5713 expand_builtin_setjmp_setup (buf_addr, label_r);
5714 nonlocal_goto_handler_labels
5715 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5716 nonlocal_goto_handler_labels);
5717 /* ??? Do not let expand_label treat us as such since we would
5718 not want to be both on the list of non-local labels and on
5719 the list of forced labels. */
5720 FORCED_LABEL (label) = 0;
5721 return const0_rtx;
5723 break;
5725 case BUILT_IN_SETJMP_DISPATCHER:
5726 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5727 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5729 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5730 rtx label_r = label_rtx (label);
5732 /* Remove the dispatcher label from the list of non-local labels
5733 since the receiver labels have been added to it above. */
5734 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5735 return const0_rtx;
5737 break;
5739 case BUILT_IN_SETJMP_RECEIVER:
5740 /* __builtin_setjmp_receiver is passed the receiver label. */
5741 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5743 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5744 rtx label_r = label_rtx (label);
5746 expand_builtin_setjmp_receiver (label_r);
5747 return const0_rtx;
5749 break;
5751 /* __builtin_longjmp is passed a pointer to an array of five words.
5752 It's similar to the C library longjmp function but works with
5753 __builtin_setjmp above. */
5754 case BUILT_IN_LONGJMP:
5755 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5757 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5758 VOIDmode, EXPAND_NORMAL);
5759 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5761 if (value != const1_rtx)
5763 error ("%<__builtin_longjmp%> second argument must be 1");
5764 return const0_rtx;
5767 expand_builtin_longjmp (buf_addr, value);
5768 return const0_rtx;
5770 break;
5772 case BUILT_IN_NONLOCAL_GOTO:
5773 target = expand_builtin_nonlocal_goto (exp);
5774 if (target)
5775 return target;
5776 break;
5778 /* This updates the setjmp buffer that is its argument with the value
5779 of the current stack pointer. */
5780 case BUILT_IN_UPDATE_SETJMP_BUF:
5781 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5783 rtx buf_addr
5784 = expand_normal (CALL_EXPR_ARG (exp, 0));
5786 expand_builtin_update_setjmp_buf (buf_addr);
5787 return const0_rtx;
5789 break;
5791 case BUILT_IN_TRAP:
5792 expand_builtin_trap ();
5793 return const0_rtx;
5795 case BUILT_IN_UNREACHABLE:
5796 expand_builtin_unreachable ();
5797 return const0_rtx;
5799 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5800 case BUILT_IN_SIGNBITD32:
5801 case BUILT_IN_SIGNBITD64:
5802 case BUILT_IN_SIGNBITD128:
5803 target = expand_builtin_signbit (exp, target);
5804 if (target)
5805 return target;
5806 break;
5808 /* Various hooks for the DWARF 2 __throw routine. */
5809 case BUILT_IN_UNWIND_INIT:
5810 expand_builtin_unwind_init ();
5811 return const0_rtx;
5812 case BUILT_IN_DWARF_CFA:
5813 return virtual_cfa_rtx;
5814 #ifdef DWARF2_UNWIND_INFO
5815 case BUILT_IN_DWARF_SP_COLUMN:
5816 return expand_builtin_dwarf_sp_column ();
5817 case BUILT_IN_INIT_DWARF_REG_SIZES:
5818 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5819 return const0_rtx;
5820 #endif
5821 case BUILT_IN_FROB_RETURN_ADDR:
5822 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5823 case BUILT_IN_EXTRACT_RETURN_ADDR:
5824 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5825 case BUILT_IN_EH_RETURN:
5826 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5827 CALL_EXPR_ARG (exp, 1));
5828 return const0_rtx;
5829 #ifdef EH_RETURN_DATA_REGNO
5830 case BUILT_IN_EH_RETURN_DATA_REGNO:
5831 return expand_builtin_eh_return_data_regno (exp);
5832 #endif
5833 case BUILT_IN_EXTEND_POINTER:
5834 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5835 case BUILT_IN_EH_POINTER:
5836 return expand_builtin_eh_pointer (exp);
5837 case BUILT_IN_EH_FILTER:
5838 return expand_builtin_eh_filter (exp);
5839 case BUILT_IN_EH_COPY_VALUES:
5840 return expand_builtin_eh_copy_values (exp);
5842 case BUILT_IN_VA_START:
5843 return expand_builtin_va_start (exp);
5844 case BUILT_IN_VA_END:
5845 return expand_builtin_va_end (exp);
5846 case BUILT_IN_VA_COPY:
5847 return expand_builtin_va_copy (exp);
5848 case BUILT_IN_EXPECT:
5849 return expand_builtin_expect (exp, target);
5850 case BUILT_IN_ASSUME_ALIGNED:
5851 return expand_builtin_assume_aligned (exp, target);
5852 case BUILT_IN_PREFETCH:
5853 expand_builtin_prefetch (exp);
5854 return const0_rtx;
5856 case BUILT_IN_INIT_TRAMPOLINE:
5857 return expand_builtin_init_trampoline (exp);
5858 case BUILT_IN_ADJUST_TRAMPOLINE:
5859 return expand_builtin_adjust_trampoline (exp);
5861 case BUILT_IN_FORK:
5862 case BUILT_IN_EXECL:
5863 case BUILT_IN_EXECV:
5864 case BUILT_IN_EXECLP:
5865 case BUILT_IN_EXECLE:
5866 case BUILT_IN_EXECVP:
5867 case BUILT_IN_EXECVE:
5868 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5869 if (target)
5870 return target;
5871 break;
5873 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
5874 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
5875 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
5876 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
5877 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
5878 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
5879 target = expand_builtin_sync_operation (mode, exp, PLUS,
5880 false, target, ignore);
5881 if (target)
5882 return target;
5883 break;
5885 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
5886 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
5887 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
5888 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
5889 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
5890 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
5891 target = expand_builtin_sync_operation (mode, exp, MINUS,
5892 false, target, ignore);
5893 if (target)
5894 return target;
5895 break;
5897 case BUILT_IN_SYNC_FETCH_AND_OR_1:
5898 case BUILT_IN_SYNC_FETCH_AND_OR_2:
5899 case BUILT_IN_SYNC_FETCH_AND_OR_4:
5900 case BUILT_IN_SYNC_FETCH_AND_OR_8:
5901 case BUILT_IN_SYNC_FETCH_AND_OR_16:
5902 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
5903 target = expand_builtin_sync_operation (mode, exp, IOR,
5904 false, target, ignore);
5905 if (target)
5906 return target;
5907 break;
5909 case BUILT_IN_SYNC_FETCH_AND_AND_1:
5910 case BUILT_IN_SYNC_FETCH_AND_AND_2:
5911 case BUILT_IN_SYNC_FETCH_AND_AND_4:
5912 case BUILT_IN_SYNC_FETCH_AND_AND_8:
5913 case BUILT_IN_SYNC_FETCH_AND_AND_16:
5914 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
5915 target = expand_builtin_sync_operation (mode, exp, AND,
5916 false, target, ignore);
5917 if (target)
5918 return target;
5919 break;
5921 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
5922 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
5923 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
5924 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
5925 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
5926 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
5927 target = expand_builtin_sync_operation (mode, exp, XOR,
5928 false, target, ignore);
5929 if (target)
5930 return target;
5931 break;
5933 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5934 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5935 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5936 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5937 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5938 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
5939 target = expand_builtin_sync_operation (mode, exp, NOT,
5940 false, target, ignore);
5941 if (target)
5942 return target;
5943 break;
5945 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
5946 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
5947 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
5948 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
5949 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
5950 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
5951 target = expand_builtin_sync_operation (mode, exp, PLUS,
5952 true, target, ignore);
5953 if (target)
5954 return target;
5955 break;
5957 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
5958 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
5959 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
5960 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
5961 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
5962 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
5963 target = expand_builtin_sync_operation (mode, exp, MINUS,
5964 true, target, ignore);
5965 if (target)
5966 return target;
5967 break;
5969 case BUILT_IN_SYNC_OR_AND_FETCH_1:
5970 case BUILT_IN_SYNC_OR_AND_FETCH_2:
5971 case BUILT_IN_SYNC_OR_AND_FETCH_4:
5972 case BUILT_IN_SYNC_OR_AND_FETCH_8:
5973 case BUILT_IN_SYNC_OR_AND_FETCH_16:
5974 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
5975 target = expand_builtin_sync_operation (mode, exp, IOR,
5976 true, target, ignore);
5977 if (target)
5978 return target;
5979 break;
5981 case BUILT_IN_SYNC_AND_AND_FETCH_1:
5982 case BUILT_IN_SYNC_AND_AND_FETCH_2:
5983 case BUILT_IN_SYNC_AND_AND_FETCH_4:
5984 case BUILT_IN_SYNC_AND_AND_FETCH_8:
5985 case BUILT_IN_SYNC_AND_AND_FETCH_16:
5986 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
5987 target = expand_builtin_sync_operation (mode, exp, AND,
5988 true, target, ignore);
5989 if (target)
5990 return target;
5991 break;
5993 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
5994 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
5995 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
5996 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
5997 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
5998 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
5999 target = expand_builtin_sync_operation (mode, exp, XOR,
6000 true, target, ignore);
6001 if (target)
6002 return target;
6003 break;
6005 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6006 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6007 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6008 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6009 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6010 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6011 target = expand_builtin_sync_operation (mode, exp, NOT,
6012 true, target, ignore);
6013 if (target)
6014 return target;
6015 break;
6017 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6018 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6019 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6020 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6021 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6022 if (mode == VOIDmode)
6023 mode = TYPE_MODE (boolean_type_node);
6024 if (!target || !register_operand (target, mode))
6025 target = gen_reg_rtx (mode);
6027 mode = get_builtin_sync_mode
6028 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6029 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6030 if (target)
6031 return target;
6032 break;
6034 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6035 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6036 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6037 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6038 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6039 mode = get_builtin_sync_mode
6040 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6041 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6042 if (target)
6043 return target;
6044 break;
6046 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6047 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6048 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6049 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6050 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6051 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6052 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6053 if (target)
6054 return target;
6055 break;
6057 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6058 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6059 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6060 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6061 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6062 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6063 expand_builtin_sync_lock_release (mode, exp);
6064 return const0_rtx;
6066 case BUILT_IN_SYNC_SYNCHRONIZE:
6067 expand_builtin_sync_synchronize ();
6068 return const0_rtx;
6070 case BUILT_IN_OBJECT_SIZE:
6071 return expand_builtin_object_size (exp);
6073 case BUILT_IN_MEMCPY_CHK:
6074 case BUILT_IN_MEMPCPY_CHK:
6075 case BUILT_IN_MEMMOVE_CHK:
6076 case BUILT_IN_MEMSET_CHK:
6077 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6078 if (target)
6079 return target;
6080 break;
6082 case BUILT_IN_STRCPY_CHK:
6083 case BUILT_IN_STPCPY_CHK:
6084 case BUILT_IN_STRNCPY_CHK:
6085 case BUILT_IN_STRCAT_CHK:
6086 case BUILT_IN_STRNCAT_CHK:
6087 case BUILT_IN_SNPRINTF_CHK:
6088 case BUILT_IN_VSNPRINTF_CHK:
6089 maybe_emit_chk_warning (exp, fcode);
6090 break;
6092 case BUILT_IN_SPRINTF_CHK:
6093 case BUILT_IN_VSPRINTF_CHK:
6094 maybe_emit_sprintf_chk_warning (exp, fcode);
6095 break;
6097 case BUILT_IN_FREE:
6098 maybe_emit_free_warning (exp);
6099 break;
6101 default: /* just do library call, if unknown builtin */
6102 break;
6105 /* The switch statement above can drop through to cause the function
6106 to be called normally. */
6107 return expand_call (exp, target, ignore);
6110 /* Determine whether a tree node represents a call to a built-in
6111 function. If the tree T is a call to a built-in function with
6112 the right number of arguments of the appropriate types, return
6113 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6114 Otherwise the return value is END_BUILTINS. */
6116 enum built_in_function
6117 builtin_mathfn_code (const_tree t)
6119 const_tree fndecl, arg, parmlist;
6120 const_tree argtype, parmtype;
6121 const_call_expr_arg_iterator iter;
6123 if (TREE_CODE (t) != CALL_EXPR
6124 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6125 return END_BUILTINS;
6127 fndecl = get_callee_fndecl (t);
6128 if (fndecl == NULL_TREE
6129 || TREE_CODE (fndecl) != FUNCTION_DECL
6130 || ! DECL_BUILT_IN (fndecl)
6131 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6132 return END_BUILTINS;
6134 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6135 init_const_call_expr_arg_iterator (t, &iter);
6136 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6138 /* If a function doesn't take a variable number of arguments,
6139 the last element in the list will have type `void'. */
6140 parmtype = TREE_VALUE (parmlist);
6141 if (VOID_TYPE_P (parmtype))
6143 if (more_const_call_expr_args_p (&iter))
6144 return END_BUILTINS;
6145 return DECL_FUNCTION_CODE (fndecl);
6148 if (! more_const_call_expr_args_p (&iter))
6149 return END_BUILTINS;
6151 arg = next_const_call_expr_arg (&iter);
6152 argtype = TREE_TYPE (arg);
6154 if (SCALAR_FLOAT_TYPE_P (parmtype))
6156 if (! SCALAR_FLOAT_TYPE_P (argtype))
6157 return END_BUILTINS;
6159 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6161 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6162 return END_BUILTINS;
6164 else if (POINTER_TYPE_P (parmtype))
6166 if (! POINTER_TYPE_P (argtype))
6167 return END_BUILTINS;
6169 else if (INTEGRAL_TYPE_P (parmtype))
6171 if (! INTEGRAL_TYPE_P (argtype))
6172 return END_BUILTINS;
6174 else
6175 return END_BUILTINS;
6178 /* Variable-length argument list. */
6179 return DECL_FUNCTION_CODE (fndecl);
6182 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6183 evaluate to a constant. */
6185 static tree
6186 fold_builtin_constant_p (tree arg)
6188 /* We return 1 for a numeric type that's known to be a constant
6189 value at compile-time or for an aggregate type that's a
6190 literal constant. */
6191 STRIP_NOPS (arg);
6193 /* If we know this is a constant, emit the constant of one. */
6194 if (CONSTANT_CLASS_P (arg)
6195 || (TREE_CODE (arg) == CONSTRUCTOR
6196 && TREE_CONSTANT (arg)))
6197 return integer_one_node;
6198 if (TREE_CODE (arg) == ADDR_EXPR)
6200 tree op = TREE_OPERAND (arg, 0);
6201 if (TREE_CODE (op) == STRING_CST
6202 || (TREE_CODE (op) == ARRAY_REF
6203 && integer_zerop (TREE_OPERAND (op, 1))
6204 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6205 return integer_one_node;
6208 /* If this expression has side effects, show we don't know it to be a
6209 constant. Likewise if it's a pointer or aggregate type since in
6210 those case we only want literals, since those are only optimized
6211 when generating RTL, not later.
6212 And finally, if we are compiling an initializer, not code, we
6213 need to return a definite result now; there's not going to be any
6214 more optimization done. */
6215 if (TREE_SIDE_EFFECTS (arg)
6216 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6217 || POINTER_TYPE_P (TREE_TYPE (arg))
6218 || cfun == 0
6219 || folding_initializer)
6220 return integer_zero_node;
6222 return NULL_TREE;
6225 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6226 return it as a truthvalue. */
6228 static tree
6229 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6231 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6233 fn = built_in_decls[BUILT_IN_EXPECT];
6234 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6235 ret_type = TREE_TYPE (TREE_TYPE (fn));
6236 pred_type = TREE_VALUE (arg_types);
6237 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6239 pred = fold_convert_loc (loc, pred_type, pred);
6240 expected = fold_convert_loc (loc, expected_type, expected);
6241 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6243 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6244 build_int_cst (ret_type, 0));
6247 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6248 NULL_TREE if no simplification is possible. */
6250 static tree
6251 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6253 tree inner, fndecl, inner_arg0;
6254 enum tree_code code;
6256 /* Distribute the expected value over short-circuiting operators.
6257 See through the cast from truthvalue_type_node to long. */
6258 inner_arg0 = arg0;
6259 while (TREE_CODE (inner_arg0) == NOP_EXPR
6260 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6261 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6262 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6264 /* If this is a builtin_expect within a builtin_expect keep the
6265 inner one. See through a comparison against a constant. It
6266 might have been added to create a thruthvalue. */
6267 inner = inner_arg0;
6269 if (COMPARISON_CLASS_P (inner)
6270 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6271 inner = TREE_OPERAND (inner, 0);
6273 if (TREE_CODE (inner) == CALL_EXPR
6274 && (fndecl = get_callee_fndecl (inner))
6275 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6276 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6277 return arg0;
6279 inner = inner_arg0;
6280 code = TREE_CODE (inner);
6281 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6283 tree op0 = TREE_OPERAND (inner, 0);
6284 tree op1 = TREE_OPERAND (inner, 1);
6286 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6287 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6288 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6290 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6293 /* If the argument isn't invariant then there's nothing else we can do. */
6294 if (!TREE_CONSTANT (inner_arg0))
6295 return NULL_TREE;
6297 /* If we expect that a comparison against the argument will fold to
6298 a constant return the constant. In practice, this means a true
6299 constant or the address of a non-weak symbol. */
6300 inner = inner_arg0;
6301 STRIP_NOPS (inner);
6302 if (TREE_CODE (inner) == ADDR_EXPR)
6306 inner = TREE_OPERAND (inner, 0);
6308 while (TREE_CODE (inner) == COMPONENT_REF
6309 || TREE_CODE (inner) == ARRAY_REF);
6310 if ((TREE_CODE (inner) == VAR_DECL
6311 || TREE_CODE (inner) == FUNCTION_DECL)
6312 && DECL_WEAK (inner))
6313 return NULL_TREE;
6316 /* Otherwise, ARG0 already has the proper type for the return value. */
6317 return arg0;
6320 /* Fold a call to __builtin_classify_type with argument ARG. */
6322 static tree
6323 fold_builtin_classify_type (tree arg)
6325 if (arg == 0)
6326 return build_int_cst (integer_type_node, no_type_class);
6328 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6331 /* Fold a call to __builtin_strlen with argument ARG. */
6333 static tree
6334 fold_builtin_strlen (location_t loc, tree type, tree arg)
6336 if (!validate_arg (arg, POINTER_TYPE))
6337 return NULL_TREE;
6338 else
6340 tree len = c_strlen (arg, 0);
6342 if (len)
6343 return fold_convert_loc (loc, type, len);
6345 return NULL_TREE;
6349 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6351 static tree
6352 fold_builtin_inf (location_t loc, tree type, int warn)
6354 REAL_VALUE_TYPE real;
6356 /* __builtin_inff is intended to be usable to define INFINITY on all
6357 targets. If an infinity is not available, INFINITY expands "to a
6358 positive constant of type float that overflows at translation
6359 time", footnote "In this case, using INFINITY will violate the
6360 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6361 Thus we pedwarn to ensure this constraint violation is
6362 diagnosed. */
6363 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6364 pedwarn (loc, 0, "target format does not support infinity");
6366 real_inf (&real);
6367 return build_real (type, real);
6370 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6372 static tree
6373 fold_builtin_nan (tree arg, tree type, int quiet)
6375 REAL_VALUE_TYPE real;
6376 const char *str;
6378 if (!validate_arg (arg, POINTER_TYPE))
6379 return NULL_TREE;
6380 str = c_getstr (arg);
6381 if (!str)
6382 return NULL_TREE;
6384 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6385 return NULL_TREE;
6387 return build_real (type, real);
6390 /* Return true if the floating point expression T has an integer value.
6391 We also allow +Inf, -Inf and NaN to be considered integer values. */
6393 static bool
6394 integer_valued_real_p (tree t)
6396 switch (TREE_CODE (t))
6398 case FLOAT_EXPR:
6399 return true;
6401 case ABS_EXPR:
6402 case SAVE_EXPR:
6403 return integer_valued_real_p (TREE_OPERAND (t, 0));
6405 case COMPOUND_EXPR:
6406 case MODIFY_EXPR:
6407 case BIND_EXPR:
6408 return integer_valued_real_p (TREE_OPERAND (t, 1));
6410 case PLUS_EXPR:
6411 case MINUS_EXPR:
6412 case MULT_EXPR:
6413 case MIN_EXPR:
6414 case MAX_EXPR:
6415 return integer_valued_real_p (TREE_OPERAND (t, 0))
6416 && integer_valued_real_p (TREE_OPERAND (t, 1));
6418 case COND_EXPR:
6419 return integer_valued_real_p (TREE_OPERAND (t, 1))
6420 && integer_valued_real_p (TREE_OPERAND (t, 2));
6422 case REAL_CST:
6423 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6425 case NOP_EXPR:
6427 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6428 if (TREE_CODE (type) == INTEGER_TYPE)
6429 return true;
6430 if (TREE_CODE (type) == REAL_TYPE)
6431 return integer_valued_real_p (TREE_OPERAND (t, 0));
6432 break;
6435 case CALL_EXPR:
6436 switch (builtin_mathfn_code (t))
6438 CASE_FLT_FN (BUILT_IN_CEIL):
6439 CASE_FLT_FN (BUILT_IN_FLOOR):
6440 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6441 CASE_FLT_FN (BUILT_IN_RINT):
6442 CASE_FLT_FN (BUILT_IN_ROUND):
6443 CASE_FLT_FN (BUILT_IN_TRUNC):
6444 return true;
6446 CASE_FLT_FN (BUILT_IN_FMIN):
6447 CASE_FLT_FN (BUILT_IN_FMAX):
6448 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6449 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6451 default:
6452 break;
6454 break;
6456 default:
6457 break;
6459 return false;
6462 /* FNDECL is assumed to be a builtin where truncation can be propagated
6463 across (for instance floor((double)f) == (double)floorf (f).
6464 Do the transformation for a call with argument ARG. */
6466 static tree
6467 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6469 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6471 if (!validate_arg (arg, REAL_TYPE))
6472 return NULL_TREE;
6474 /* Integer rounding functions are idempotent. */
6475 if (fcode == builtin_mathfn_code (arg))
6476 return arg;
6478 /* If argument is already integer valued, and we don't need to worry
6479 about setting errno, there's no need to perform rounding. */
6480 if (! flag_errno_math && integer_valued_real_p (arg))
6481 return arg;
6483 if (optimize)
6485 tree arg0 = strip_float_extensions (arg);
6486 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6487 tree newtype = TREE_TYPE (arg0);
6488 tree decl;
6490 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6491 && (decl = mathfn_built_in (newtype, fcode)))
6492 return fold_convert_loc (loc, ftype,
6493 build_call_expr_loc (loc, decl, 1,
6494 fold_convert_loc (loc,
6495 newtype,
6496 arg0)));
6498 return NULL_TREE;
6501 /* FNDECL is assumed to be builtin which can narrow the FP type of
6502 the argument, for instance lround((double)f) -> lroundf (f).
6503 Do the transformation for a call with argument ARG. */
6505 static tree
6506 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6508 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6510 if (!validate_arg (arg, REAL_TYPE))
6511 return NULL_TREE;
6513 /* If argument is already integer valued, and we don't need to worry
6514 about setting errno, there's no need to perform rounding. */
6515 if (! flag_errno_math && integer_valued_real_p (arg))
6516 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6517 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6519 if (optimize)
6521 tree ftype = TREE_TYPE (arg);
6522 tree arg0 = strip_float_extensions (arg);
6523 tree newtype = TREE_TYPE (arg0);
6524 tree decl;
6526 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6527 && (decl = mathfn_built_in (newtype, fcode)))
6528 return build_call_expr_loc (loc, decl, 1,
6529 fold_convert_loc (loc, newtype, arg0));
6532 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
6533 sizeof (int) == sizeof (long). */
6534 if (TYPE_PRECISION (integer_type_node)
6535 == TYPE_PRECISION (long_integer_type_node))
6537 tree newfn = NULL_TREE;
6538 switch (fcode)
6540 CASE_FLT_FN (BUILT_IN_ICEIL):
6541 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6542 break;
6544 CASE_FLT_FN (BUILT_IN_IFLOOR):
6545 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6546 break;
6548 CASE_FLT_FN (BUILT_IN_IROUND):
6549 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6550 break;
6552 CASE_FLT_FN (BUILT_IN_IRINT):
6553 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6554 break;
6556 default:
6557 break;
6560 if (newfn)
6562 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6563 return fold_convert_loc (loc,
6564 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6568 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6569 sizeof (long long) == sizeof (long). */
6570 if (TYPE_PRECISION (long_long_integer_type_node)
6571 == TYPE_PRECISION (long_integer_type_node))
6573 tree newfn = NULL_TREE;
6574 switch (fcode)
6576 CASE_FLT_FN (BUILT_IN_LLCEIL):
6577 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6578 break;
6580 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6581 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6582 break;
6584 CASE_FLT_FN (BUILT_IN_LLROUND):
6585 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6586 break;
6588 CASE_FLT_FN (BUILT_IN_LLRINT):
6589 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6590 break;
6592 default:
6593 break;
6596 if (newfn)
6598 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6599 return fold_convert_loc (loc,
6600 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6604 return NULL_TREE;
6607 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6608 return type. Return NULL_TREE if no simplification can be made. */
6610 static tree
6611 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6613 tree res;
6615 if (!validate_arg (arg, COMPLEX_TYPE)
6616 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6617 return NULL_TREE;
6619 /* Calculate the result when the argument is a constant. */
6620 if (TREE_CODE (arg) == COMPLEX_CST
6621 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6622 type, mpfr_hypot)))
6623 return res;
6625 if (TREE_CODE (arg) == COMPLEX_EXPR)
6627 tree real = TREE_OPERAND (arg, 0);
6628 tree imag = TREE_OPERAND (arg, 1);
6630 /* If either part is zero, cabs is fabs of the other. */
6631 if (real_zerop (real))
6632 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6633 if (real_zerop (imag))
6634 return fold_build1_loc (loc, ABS_EXPR, type, real);
6636 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6637 if (flag_unsafe_math_optimizations
6638 && operand_equal_p (real, imag, OEP_PURE_SAME))
6640 const REAL_VALUE_TYPE sqrt2_trunc
6641 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6642 STRIP_NOPS (real);
6643 return fold_build2_loc (loc, MULT_EXPR, type,
6644 fold_build1_loc (loc, ABS_EXPR, type, real),
6645 build_real (type, sqrt2_trunc));
6649 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6650 if (TREE_CODE (arg) == NEGATE_EXPR
6651 || TREE_CODE (arg) == CONJ_EXPR)
6652 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6654 /* Don't do this when optimizing for size. */
6655 if (flag_unsafe_math_optimizations
6656 && optimize && optimize_function_for_speed_p (cfun))
6658 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6660 if (sqrtfn != NULL_TREE)
6662 tree rpart, ipart, result;
6664 arg = builtin_save_expr (arg);
6666 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6667 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6669 rpart = builtin_save_expr (rpart);
6670 ipart = builtin_save_expr (ipart);
6672 result = fold_build2_loc (loc, PLUS_EXPR, type,
6673 fold_build2_loc (loc, MULT_EXPR, type,
6674 rpart, rpart),
6675 fold_build2_loc (loc, MULT_EXPR, type,
6676 ipart, ipart));
6678 return build_call_expr_loc (loc, sqrtfn, 1, result);
6682 return NULL_TREE;
6685 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6686 complex tree type of the result. If NEG is true, the imaginary
6687 zero is negative. */
6689 static tree
6690 build_complex_cproj (tree type, bool neg)
6692 REAL_VALUE_TYPE rinf, rzero = dconst0;
6694 real_inf (&rinf);
6695 rzero.sign = neg;
6696 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6697 build_real (TREE_TYPE (type), rzero));
6700 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6701 return type. Return NULL_TREE if no simplification can be made. */
6703 static tree
6704 fold_builtin_cproj (location_t loc, tree arg, tree type)
6706 if (!validate_arg (arg, COMPLEX_TYPE)
6707 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6708 return NULL_TREE;
6710 /* If there are no infinities, return arg. */
6711 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
6712 return non_lvalue_loc (loc, arg);
6714 /* Calculate the result when the argument is a constant. */
6715 if (TREE_CODE (arg) == COMPLEX_CST)
6717 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
6718 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
6720 if (real_isinf (real) || real_isinf (imag))
6721 return build_complex_cproj (type, imag->sign);
6722 else
6723 return arg;
6725 else if (TREE_CODE (arg) == COMPLEX_EXPR)
6727 tree real = TREE_OPERAND (arg, 0);
6728 tree imag = TREE_OPERAND (arg, 1);
6730 STRIP_NOPS (real);
6731 STRIP_NOPS (imag);
6733 /* If the real part is inf and the imag part is known to be
6734 nonnegative, return (inf + 0i). Remember side-effects are
6735 possible in the imag part. */
6736 if (TREE_CODE (real) == REAL_CST
6737 && real_isinf (TREE_REAL_CST_PTR (real))
6738 && tree_expr_nonnegative_p (imag))
6739 return omit_one_operand_loc (loc, type,
6740 build_complex_cproj (type, false),
6741 arg);
6743 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
6744 Remember side-effects are possible in the real part. */
6745 if (TREE_CODE (imag) == REAL_CST
6746 && real_isinf (TREE_REAL_CST_PTR (imag)))
6747 return
6748 omit_one_operand_loc (loc, type,
6749 build_complex_cproj (type, TREE_REAL_CST_PTR
6750 (imag)->sign), arg);
6753 return NULL_TREE;
6756 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6757 Return NULL_TREE if no simplification can be made. */
6759 static tree
6760 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6763 enum built_in_function fcode;
6764 tree res;
6766 if (!validate_arg (arg, REAL_TYPE))
6767 return NULL_TREE;
6769 /* Calculate the result when the argument is a constant. */
6770 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6771 return res;
6773 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6774 fcode = builtin_mathfn_code (arg);
6775 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6777 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6778 arg = fold_build2_loc (loc, MULT_EXPR, type,
6779 CALL_EXPR_ARG (arg, 0),
6780 build_real (type, dconsthalf));
6781 return build_call_expr_loc (loc, expfn, 1, arg);
6784 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6785 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6787 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6789 if (powfn)
6791 tree arg0 = CALL_EXPR_ARG (arg, 0);
6792 tree tree_root;
6793 /* The inner root was either sqrt or cbrt. */
6794 /* This was a conditional expression but it triggered a bug
6795 in Sun C 5.5. */
6796 REAL_VALUE_TYPE dconstroot;
6797 if (BUILTIN_SQRT_P (fcode))
6798 dconstroot = dconsthalf;
6799 else
6800 dconstroot = dconst_third ();
6802 /* Adjust for the outer root. */
6803 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6804 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6805 tree_root = build_real (type, dconstroot);
6806 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6810 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6811 if (flag_unsafe_math_optimizations
6812 && (fcode == BUILT_IN_POW
6813 || fcode == BUILT_IN_POWF
6814 || fcode == BUILT_IN_POWL))
6816 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6817 tree arg0 = CALL_EXPR_ARG (arg, 0);
6818 tree arg1 = CALL_EXPR_ARG (arg, 1);
6819 tree narg1;
6820 if (!tree_expr_nonnegative_p (arg0))
6821 arg0 = build1 (ABS_EXPR, type, arg0);
6822 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
6823 build_real (type, dconsthalf));
6824 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
6827 return NULL_TREE;
6830 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6831 Return NULL_TREE if no simplification can be made. */
6833 static tree
6834 fold_builtin_cbrt (location_t loc, tree arg, tree type)
6836 const enum built_in_function fcode = builtin_mathfn_code (arg);
6837 tree res;
6839 if (!validate_arg (arg, REAL_TYPE))
6840 return NULL_TREE;
6842 /* Calculate the result when the argument is a constant. */
6843 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
6844 return res;
6846 if (flag_unsafe_math_optimizations)
6848 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6849 if (BUILTIN_EXPONENT_P (fcode))
6851 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6852 const REAL_VALUE_TYPE third_trunc =
6853 real_value_truncate (TYPE_MODE (type), dconst_third ());
6854 arg = fold_build2_loc (loc, MULT_EXPR, type,
6855 CALL_EXPR_ARG (arg, 0),
6856 build_real (type, third_trunc));
6857 return build_call_expr_loc (loc, expfn, 1, arg);
6860 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6861 if (BUILTIN_SQRT_P (fcode))
6863 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6865 if (powfn)
6867 tree arg0 = CALL_EXPR_ARG (arg, 0);
6868 tree tree_root;
6869 REAL_VALUE_TYPE dconstroot = dconst_third ();
6871 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6872 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6873 tree_root = build_real (type, dconstroot);
6874 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6878 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6879 if (BUILTIN_CBRT_P (fcode))
6881 tree arg0 = CALL_EXPR_ARG (arg, 0);
6882 if (tree_expr_nonnegative_p (arg0))
6884 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6886 if (powfn)
6888 tree tree_root;
6889 REAL_VALUE_TYPE dconstroot;
6891 real_arithmetic (&dconstroot, MULT_EXPR,
6892 dconst_third_ptr (), dconst_third_ptr ());
6893 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6894 tree_root = build_real (type, dconstroot);
6895 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6900 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6901 if (fcode == BUILT_IN_POW
6902 || fcode == BUILT_IN_POWF
6903 || fcode == BUILT_IN_POWL)
6905 tree arg00 = CALL_EXPR_ARG (arg, 0);
6906 tree arg01 = CALL_EXPR_ARG (arg, 1);
6907 if (tree_expr_nonnegative_p (arg00))
6909 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6910 const REAL_VALUE_TYPE dconstroot
6911 = real_value_truncate (TYPE_MODE (type), dconst_third ());
6912 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
6913 build_real (type, dconstroot));
6914 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
6918 return NULL_TREE;
6921 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6922 TYPE is the type of the return value. Return NULL_TREE if no
6923 simplification can be made. */
6925 static tree
6926 fold_builtin_cos (location_t loc,
6927 tree arg, tree type, tree fndecl)
6929 tree res, narg;
6931 if (!validate_arg (arg, REAL_TYPE))
6932 return NULL_TREE;
6934 /* Calculate the result when the argument is a constant. */
6935 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6936 return res;
6938 /* Optimize cos(-x) into cos (x). */
6939 if ((narg = fold_strip_sign_ops (arg)))
6940 return build_call_expr_loc (loc, fndecl, 1, narg);
6942 return NULL_TREE;
6945 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6946 Return NULL_TREE if no simplification can be made. */
6948 static tree
6949 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6951 if (validate_arg (arg, REAL_TYPE))
6953 tree res, narg;
6955 /* Calculate the result when the argument is a constant. */
6956 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6957 return res;
6959 /* Optimize cosh(-x) into cosh (x). */
6960 if ((narg = fold_strip_sign_ops (arg)))
6961 return build_call_expr_loc (loc, fndecl, 1, narg);
6964 return NULL_TREE;
6967 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6968 argument ARG. TYPE is the type of the return value. Return
6969 NULL_TREE if no simplification can be made. */
6971 static tree
6972 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6973 bool hyper)
6975 if (validate_arg (arg, COMPLEX_TYPE)
6976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6978 tree tmp;
6980 /* Calculate the result when the argument is a constant. */
6981 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6982 return tmp;
6984 /* Optimize fn(-x) into fn(x). */
6985 if ((tmp = fold_strip_sign_ops (arg)))
6986 return build_call_expr_loc (loc, fndecl, 1, tmp);
6989 return NULL_TREE;
6992 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6993 Return NULL_TREE if no simplification can be made. */
6995 static tree
6996 fold_builtin_tan (tree arg, tree type)
6998 enum built_in_function fcode;
6999 tree res;
7001 if (!validate_arg (arg, REAL_TYPE))
7002 return NULL_TREE;
7004 /* Calculate the result when the argument is a constant. */
7005 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7006 return res;
7008 /* Optimize tan(atan(x)) = x. */
7009 fcode = builtin_mathfn_code (arg);
7010 if (flag_unsafe_math_optimizations
7011 && (fcode == BUILT_IN_ATAN
7012 || fcode == BUILT_IN_ATANF
7013 || fcode == BUILT_IN_ATANL))
7014 return CALL_EXPR_ARG (arg, 0);
7016 return NULL_TREE;
7019 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7020 NULL_TREE if no simplification can be made. */
7022 static tree
7023 fold_builtin_sincos (location_t loc,
7024 tree arg0, tree arg1, tree arg2)
7026 tree type;
7027 tree res, fn, call;
7029 if (!validate_arg (arg0, REAL_TYPE)
7030 || !validate_arg (arg1, POINTER_TYPE)
7031 || !validate_arg (arg2, POINTER_TYPE))
7032 return NULL_TREE;
7034 type = TREE_TYPE (arg0);
7036 /* Calculate the result when the argument is a constant. */
7037 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7038 return res;
7040 /* Canonicalize sincos to cexpi. */
7041 if (!TARGET_C99_FUNCTIONS)
7042 return NULL_TREE;
7043 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7044 if (!fn)
7045 return NULL_TREE;
7047 call = build_call_expr_loc (loc, fn, 1, arg0);
7048 call = builtin_save_expr (call);
7050 return build2 (COMPOUND_EXPR, void_type_node,
7051 build2 (MODIFY_EXPR, void_type_node,
7052 build_fold_indirect_ref_loc (loc, arg1),
7053 build1 (IMAGPART_EXPR, type, call)),
7054 build2 (MODIFY_EXPR, void_type_node,
7055 build_fold_indirect_ref_loc (loc, arg2),
7056 build1 (REALPART_EXPR, type, call)));
7059 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7060 NULL_TREE if no simplification can be made. */
7062 static tree
7063 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7065 tree rtype;
7066 tree realp, imagp, ifn;
7067 tree res;
7069 if (!validate_arg (arg0, COMPLEX_TYPE)
7070 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7071 return NULL_TREE;
7073 /* Calculate the result when the argument is a constant. */
7074 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7075 return res;
7077 rtype = TREE_TYPE (TREE_TYPE (arg0));
7079 /* In case we can figure out the real part of arg0 and it is constant zero
7080 fold to cexpi. */
7081 if (!TARGET_C99_FUNCTIONS)
7082 return NULL_TREE;
7083 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7084 if (!ifn)
7085 return NULL_TREE;
7087 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7088 && real_zerop (realp))
7090 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7091 return build_call_expr_loc (loc, ifn, 1, narg);
7094 /* In case we can easily decompose real and imaginary parts split cexp
7095 to exp (r) * cexpi (i). */
7096 if (flag_unsafe_math_optimizations
7097 && realp)
7099 tree rfn, rcall, icall;
7101 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7102 if (!rfn)
7103 return NULL_TREE;
7105 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7106 if (!imagp)
7107 return NULL_TREE;
7109 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7110 icall = builtin_save_expr (icall);
7111 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7112 rcall = builtin_save_expr (rcall);
7113 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7114 fold_build2_loc (loc, MULT_EXPR, rtype,
7115 rcall,
7116 fold_build1_loc (loc, REALPART_EXPR,
7117 rtype, icall)),
7118 fold_build2_loc (loc, MULT_EXPR, rtype,
7119 rcall,
7120 fold_build1_loc (loc, IMAGPART_EXPR,
7121 rtype, icall)));
7124 return NULL_TREE;
7127 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7128 Return NULL_TREE if no simplification can be made. */
7130 static tree
7131 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7133 if (!validate_arg (arg, REAL_TYPE))
7134 return NULL_TREE;
7136 /* Optimize trunc of constant value. */
7137 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7139 REAL_VALUE_TYPE r, x;
7140 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7142 x = TREE_REAL_CST (arg);
7143 real_trunc (&r, TYPE_MODE (type), &x);
7144 return build_real (type, r);
7147 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7150 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7151 Return NULL_TREE if no simplification can be made. */
7153 static tree
7154 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7156 if (!validate_arg (arg, REAL_TYPE))
7157 return NULL_TREE;
7159 /* Optimize floor of constant value. */
7160 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7162 REAL_VALUE_TYPE x;
7164 x = TREE_REAL_CST (arg);
7165 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7167 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7168 REAL_VALUE_TYPE r;
7170 real_floor (&r, TYPE_MODE (type), &x);
7171 return build_real (type, r);
7175 /* Fold floor (x) where x is nonnegative to trunc (x). */
7176 if (tree_expr_nonnegative_p (arg))
7178 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7179 if (truncfn)
7180 return build_call_expr_loc (loc, truncfn, 1, arg);
7183 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7186 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7187 Return NULL_TREE if no simplification can be made. */
7189 static tree
7190 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7192 if (!validate_arg (arg, REAL_TYPE))
7193 return NULL_TREE;
7195 /* Optimize ceil of constant value. */
7196 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7198 REAL_VALUE_TYPE x;
7200 x = TREE_REAL_CST (arg);
7201 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7203 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7204 REAL_VALUE_TYPE r;
7206 real_ceil (&r, TYPE_MODE (type), &x);
7207 return build_real (type, r);
7211 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7214 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7215 Return NULL_TREE if no simplification can be made. */
7217 static tree
7218 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7220 if (!validate_arg (arg, REAL_TYPE))
7221 return NULL_TREE;
7223 /* Optimize round of constant value. */
7224 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7226 REAL_VALUE_TYPE x;
7228 x = TREE_REAL_CST (arg);
7229 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7232 REAL_VALUE_TYPE r;
7234 real_round (&r, TYPE_MODE (type), &x);
7235 return build_real (type, r);
7239 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7242 /* Fold function call to builtin lround, lroundf or lroundl (or the
7243 corresponding long long versions) and other rounding functions. ARG
7244 is the argument to the call. Return NULL_TREE if no simplification
7245 can be made. */
7247 static tree
7248 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7250 if (!validate_arg (arg, REAL_TYPE))
7251 return NULL_TREE;
7253 /* Optimize lround of constant value. */
7254 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7256 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7258 if (real_isfinite (&x))
7260 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7261 tree ftype = TREE_TYPE (arg);
7262 double_int val;
7263 REAL_VALUE_TYPE r;
7265 switch (DECL_FUNCTION_CODE (fndecl))
7267 CASE_FLT_FN (BUILT_IN_IFLOOR):
7268 CASE_FLT_FN (BUILT_IN_LFLOOR):
7269 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7270 real_floor (&r, TYPE_MODE (ftype), &x);
7271 break;
7273 CASE_FLT_FN (BUILT_IN_ICEIL):
7274 CASE_FLT_FN (BUILT_IN_LCEIL):
7275 CASE_FLT_FN (BUILT_IN_LLCEIL):
7276 real_ceil (&r, TYPE_MODE (ftype), &x);
7277 break;
7279 CASE_FLT_FN (BUILT_IN_IROUND):
7280 CASE_FLT_FN (BUILT_IN_LROUND):
7281 CASE_FLT_FN (BUILT_IN_LLROUND):
7282 real_round (&r, TYPE_MODE (ftype), &x);
7283 break;
7285 default:
7286 gcc_unreachable ();
7289 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7290 if (double_int_fits_to_tree_p (itype, val))
7291 return double_int_to_tree (itype, val);
7295 switch (DECL_FUNCTION_CODE (fndecl))
7297 CASE_FLT_FN (BUILT_IN_LFLOOR):
7298 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7299 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7300 if (tree_expr_nonnegative_p (arg))
7301 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7302 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7303 break;
7304 default:;
7307 return fold_fixed_mathfn (loc, fndecl, arg);
7310 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7311 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7312 the argument to the call. Return NULL_TREE if no simplification can
7313 be made. */
7315 static tree
7316 fold_builtin_bitop (tree fndecl, tree arg)
7318 if (!validate_arg (arg, INTEGER_TYPE))
7319 return NULL_TREE;
7321 /* Optimize for constant argument. */
7322 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7324 HOST_WIDE_INT hi, width, result;
7325 unsigned HOST_WIDE_INT lo;
7326 tree type;
7328 type = TREE_TYPE (arg);
7329 width = TYPE_PRECISION (type);
7330 lo = TREE_INT_CST_LOW (arg);
7332 /* Clear all the bits that are beyond the type's precision. */
7333 if (width > HOST_BITS_PER_WIDE_INT)
7335 hi = TREE_INT_CST_HIGH (arg);
7336 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7337 hi &= ~((unsigned HOST_WIDE_INT) (-1)
7338 << (width - HOST_BITS_PER_WIDE_INT));
7340 else
7342 hi = 0;
7343 if (width < HOST_BITS_PER_WIDE_INT)
7344 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7347 switch (DECL_FUNCTION_CODE (fndecl))
7349 CASE_INT_FN (BUILT_IN_FFS):
7350 if (lo != 0)
7351 result = ffs_hwi (lo);
7352 else if (hi != 0)
7353 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7354 else
7355 result = 0;
7356 break;
7358 CASE_INT_FN (BUILT_IN_CLZ):
7359 if (hi != 0)
7360 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7361 else if (lo != 0)
7362 result = width - floor_log2 (lo) - 1;
7363 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7364 result = width;
7365 break;
7367 CASE_INT_FN (BUILT_IN_CTZ):
7368 if (lo != 0)
7369 result = ctz_hwi (lo);
7370 else if (hi != 0)
7371 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7372 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7373 result = width;
7374 break;
7376 CASE_INT_FN (BUILT_IN_CLRSB):
7377 if (width > HOST_BITS_PER_WIDE_INT
7378 && (hi & ((unsigned HOST_WIDE_INT) 1
7379 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7381 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
7382 << (width - HOST_BITS_PER_WIDE_INT - 1));
7383 lo = ~lo;
7385 else if (width <= HOST_BITS_PER_WIDE_INT
7386 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7387 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
7388 if (hi != 0)
7389 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7390 else if (lo != 0)
7391 result = width - floor_log2 (lo) - 2;
7392 else
7393 result = width - 1;
7394 break;
7396 CASE_INT_FN (BUILT_IN_POPCOUNT):
7397 result = 0;
7398 while (lo)
7399 result++, lo &= lo - 1;
7400 while (hi)
7401 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7402 break;
7404 CASE_INT_FN (BUILT_IN_PARITY):
7405 result = 0;
7406 while (lo)
7407 result++, lo &= lo - 1;
7408 while (hi)
7409 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7410 result &= 1;
7411 break;
7413 default:
7414 gcc_unreachable ();
7417 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7420 return NULL_TREE;
7423 /* Fold function call to builtin_bswap and the long and long long
7424 variants. Return NULL_TREE if no simplification can be made. */
7425 static tree
7426 fold_builtin_bswap (tree fndecl, tree arg)
7428 if (! validate_arg (arg, INTEGER_TYPE))
7429 return NULL_TREE;
7431 /* Optimize constant value. */
7432 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7434 HOST_WIDE_INT hi, width, r_hi = 0;
7435 unsigned HOST_WIDE_INT lo, r_lo = 0;
7436 tree type;
7438 type = TREE_TYPE (arg);
7439 width = TYPE_PRECISION (type);
7440 lo = TREE_INT_CST_LOW (arg);
7441 hi = TREE_INT_CST_HIGH (arg);
7443 switch (DECL_FUNCTION_CODE (fndecl))
7445 case BUILT_IN_BSWAP32:
7446 case BUILT_IN_BSWAP64:
7448 int s;
7450 for (s = 0; s < width; s += 8)
7452 int d = width - s - 8;
7453 unsigned HOST_WIDE_INT byte;
7455 if (s < HOST_BITS_PER_WIDE_INT)
7456 byte = (lo >> s) & 0xff;
7457 else
7458 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7460 if (d < HOST_BITS_PER_WIDE_INT)
7461 r_lo |= byte << d;
7462 else
7463 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7467 break;
7469 default:
7470 gcc_unreachable ();
7473 if (width < HOST_BITS_PER_WIDE_INT)
7474 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7475 else
7476 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7479 return NULL_TREE;
7482 /* A subroutine of fold_builtin to fold the various logarithmic
7483 functions. Return NULL_TREE if no simplification can me made.
7484 FUNC is the corresponding MPFR logarithm function. */
7486 static tree
7487 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7488 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7490 if (validate_arg (arg, REAL_TYPE))
7492 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7493 tree res;
7494 const enum built_in_function fcode = builtin_mathfn_code (arg);
7496 /* Calculate the result when the argument is a constant. */
7497 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7498 return res;
7500 /* Special case, optimize logN(expN(x)) = x. */
7501 if (flag_unsafe_math_optimizations
7502 && ((func == mpfr_log
7503 && (fcode == BUILT_IN_EXP
7504 || fcode == BUILT_IN_EXPF
7505 || fcode == BUILT_IN_EXPL))
7506 || (func == mpfr_log2
7507 && (fcode == BUILT_IN_EXP2
7508 || fcode == BUILT_IN_EXP2F
7509 || fcode == BUILT_IN_EXP2L))
7510 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7511 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7513 /* Optimize logN(func()) for various exponential functions. We
7514 want to determine the value "x" and the power "exponent" in
7515 order to transform logN(x**exponent) into exponent*logN(x). */
7516 if (flag_unsafe_math_optimizations)
7518 tree exponent = 0, x = 0;
7520 switch (fcode)
7522 CASE_FLT_FN (BUILT_IN_EXP):
7523 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7524 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7525 dconst_e ()));
7526 exponent = CALL_EXPR_ARG (arg, 0);
7527 break;
7528 CASE_FLT_FN (BUILT_IN_EXP2):
7529 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7530 x = build_real (type, dconst2);
7531 exponent = CALL_EXPR_ARG (arg, 0);
7532 break;
7533 CASE_FLT_FN (BUILT_IN_EXP10):
7534 CASE_FLT_FN (BUILT_IN_POW10):
7535 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7537 REAL_VALUE_TYPE dconst10;
7538 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7539 x = build_real (type, dconst10);
7541 exponent = CALL_EXPR_ARG (arg, 0);
7542 break;
7543 CASE_FLT_FN (BUILT_IN_SQRT):
7544 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7545 x = CALL_EXPR_ARG (arg, 0);
7546 exponent = build_real (type, dconsthalf);
7547 break;
7548 CASE_FLT_FN (BUILT_IN_CBRT):
7549 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7550 x = CALL_EXPR_ARG (arg, 0);
7551 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7552 dconst_third ()));
7553 break;
7554 CASE_FLT_FN (BUILT_IN_POW):
7555 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7556 x = CALL_EXPR_ARG (arg, 0);
7557 exponent = CALL_EXPR_ARG (arg, 1);
7558 break;
7559 default:
7560 break;
7563 /* Now perform the optimization. */
7564 if (x && exponent)
7566 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7567 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7572 return NULL_TREE;
7575 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7576 NULL_TREE if no simplification can be made. */
7578 static tree
7579 fold_builtin_hypot (location_t loc, tree fndecl,
7580 tree arg0, tree arg1, tree type)
7582 tree res, narg0, narg1;
7584 if (!validate_arg (arg0, REAL_TYPE)
7585 || !validate_arg (arg1, REAL_TYPE))
7586 return NULL_TREE;
7588 /* Calculate the result when the argument is a constant. */
7589 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7590 return res;
7592 /* If either argument to hypot has a negate or abs, strip that off.
7593 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7594 narg0 = fold_strip_sign_ops (arg0);
7595 narg1 = fold_strip_sign_ops (arg1);
7596 if (narg0 || narg1)
7598 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7599 narg1 ? narg1 : arg1);
7602 /* If either argument is zero, hypot is fabs of the other. */
7603 if (real_zerop (arg0))
7604 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7605 else if (real_zerop (arg1))
7606 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7608 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7609 if (flag_unsafe_math_optimizations
7610 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7612 const REAL_VALUE_TYPE sqrt2_trunc
7613 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7614 return fold_build2_loc (loc, MULT_EXPR, type,
7615 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7616 build_real (type, sqrt2_trunc));
7619 return NULL_TREE;
7623 /* Fold a builtin function call to pow, powf, or powl. Return
7624 NULL_TREE if no simplification can be made. */
7625 static tree
7626 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7628 tree res;
7630 if (!validate_arg (arg0, REAL_TYPE)
7631 || !validate_arg (arg1, REAL_TYPE))
7632 return NULL_TREE;
7634 /* Calculate the result when the argument is a constant. */
7635 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7636 return res;
7638 /* Optimize pow(1.0,y) = 1.0. */
7639 if (real_onep (arg0))
7640 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7642 if (TREE_CODE (arg1) == REAL_CST
7643 && !TREE_OVERFLOW (arg1))
7645 REAL_VALUE_TYPE cint;
7646 REAL_VALUE_TYPE c;
7647 HOST_WIDE_INT n;
7649 c = TREE_REAL_CST (arg1);
7651 /* Optimize pow(x,0.0) = 1.0. */
7652 if (REAL_VALUES_EQUAL (c, dconst0))
7653 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7654 arg0);
7656 /* Optimize pow(x,1.0) = x. */
7657 if (REAL_VALUES_EQUAL (c, dconst1))
7658 return arg0;
7660 /* Optimize pow(x,-1.0) = 1.0/x. */
7661 if (REAL_VALUES_EQUAL (c, dconstm1))
7662 return fold_build2_loc (loc, RDIV_EXPR, type,
7663 build_real (type, dconst1), arg0);
7665 /* Optimize pow(x,0.5) = sqrt(x). */
7666 if (flag_unsafe_math_optimizations
7667 && REAL_VALUES_EQUAL (c, dconsthalf))
7669 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7671 if (sqrtfn != NULL_TREE)
7672 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7675 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7676 if (flag_unsafe_math_optimizations)
7678 const REAL_VALUE_TYPE dconstroot
7679 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7681 if (REAL_VALUES_EQUAL (c, dconstroot))
7683 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7684 if (cbrtfn != NULL_TREE)
7685 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7689 /* Check for an integer exponent. */
7690 n = real_to_integer (&c);
7691 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7692 if (real_identical (&c, &cint))
7694 /* Attempt to evaluate pow at compile-time, unless this should
7695 raise an exception. */
7696 if (TREE_CODE (arg0) == REAL_CST
7697 && !TREE_OVERFLOW (arg0)
7698 && (n > 0
7699 || (!flag_trapping_math && !flag_errno_math)
7700 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7702 REAL_VALUE_TYPE x;
7703 bool inexact;
7705 x = TREE_REAL_CST (arg0);
7706 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7707 if (flag_unsafe_math_optimizations || !inexact)
7708 return build_real (type, x);
7711 /* Strip sign ops from even integer powers. */
7712 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7714 tree narg0 = fold_strip_sign_ops (arg0);
7715 if (narg0)
7716 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7721 if (flag_unsafe_math_optimizations)
7723 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7725 /* Optimize pow(expN(x),y) = expN(x*y). */
7726 if (BUILTIN_EXPONENT_P (fcode))
7728 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7729 tree arg = CALL_EXPR_ARG (arg0, 0);
7730 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7731 return build_call_expr_loc (loc, expfn, 1, arg);
7734 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7735 if (BUILTIN_SQRT_P (fcode))
7737 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7738 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7739 build_real (type, dconsthalf));
7740 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7743 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7744 if (BUILTIN_CBRT_P (fcode))
7746 tree arg = CALL_EXPR_ARG (arg0, 0);
7747 if (tree_expr_nonnegative_p (arg))
7749 const REAL_VALUE_TYPE dconstroot
7750 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7751 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7752 build_real (type, dconstroot));
7753 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7757 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7758 if (fcode == BUILT_IN_POW
7759 || fcode == BUILT_IN_POWF
7760 || fcode == BUILT_IN_POWL)
7762 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7763 if (tree_expr_nonnegative_p (arg00))
7765 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7766 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7767 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7772 return NULL_TREE;
7775 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7776 Return NULL_TREE if no simplification can be made. */
7777 static tree
7778 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7779 tree arg0, tree arg1, tree type)
7781 if (!validate_arg (arg0, REAL_TYPE)
7782 || !validate_arg (arg1, INTEGER_TYPE))
7783 return NULL_TREE;
7785 /* Optimize pow(1.0,y) = 1.0. */
7786 if (real_onep (arg0))
7787 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7789 if (host_integerp (arg1, 0))
7791 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7793 /* Evaluate powi at compile-time. */
7794 if (TREE_CODE (arg0) == REAL_CST
7795 && !TREE_OVERFLOW (arg0))
7797 REAL_VALUE_TYPE x;
7798 x = TREE_REAL_CST (arg0);
7799 real_powi (&x, TYPE_MODE (type), &x, c);
7800 return build_real (type, x);
7803 /* Optimize pow(x,0) = 1.0. */
7804 if (c == 0)
7805 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7806 arg0);
7808 /* Optimize pow(x,1) = x. */
7809 if (c == 1)
7810 return arg0;
7812 /* Optimize pow(x,-1) = 1.0/x. */
7813 if (c == -1)
7814 return fold_build2_loc (loc, RDIV_EXPR, type,
7815 build_real (type, dconst1), arg0);
7818 return NULL_TREE;
7821 /* A subroutine of fold_builtin to fold the various exponent
7822 functions. Return NULL_TREE if no simplification can be made.
7823 FUNC is the corresponding MPFR exponent function. */
7825 static tree
7826 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7827 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7829 if (validate_arg (arg, REAL_TYPE))
7831 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7832 tree res;
7834 /* Calculate the result when the argument is a constant. */
7835 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7836 return res;
7838 /* Optimize expN(logN(x)) = x. */
7839 if (flag_unsafe_math_optimizations)
7841 const enum built_in_function fcode = builtin_mathfn_code (arg);
7843 if ((func == mpfr_exp
7844 && (fcode == BUILT_IN_LOG
7845 || fcode == BUILT_IN_LOGF
7846 || fcode == BUILT_IN_LOGL))
7847 || (func == mpfr_exp2
7848 && (fcode == BUILT_IN_LOG2
7849 || fcode == BUILT_IN_LOG2F
7850 || fcode == BUILT_IN_LOG2L))
7851 || (func == mpfr_exp10
7852 && (fcode == BUILT_IN_LOG10
7853 || fcode == BUILT_IN_LOG10F
7854 || fcode == BUILT_IN_LOG10L)))
7855 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7859 return NULL_TREE;
7862 /* Return true if VAR is a VAR_DECL or a component thereof. */
7864 static bool
7865 var_decl_component_p (tree var)
7867 tree inner = var;
7868 while (handled_component_p (inner))
7869 inner = TREE_OPERAND (inner, 0);
7870 return SSA_VAR_P (inner);
7873 /* Fold function call to builtin memset. Return
7874 NULL_TREE if no simplification can be made. */
7876 static tree
7877 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
7878 tree type, bool ignore)
7880 tree var, ret, etype;
7881 unsigned HOST_WIDE_INT length, cval;
7883 if (! validate_arg (dest, POINTER_TYPE)
7884 || ! validate_arg (c, INTEGER_TYPE)
7885 || ! validate_arg (len, INTEGER_TYPE))
7886 return NULL_TREE;
7888 if (! host_integerp (len, 1))
7889 return NULL_TREE;
7891 /* If the LEN parameter is zero, return DEST. */
7892 if (integer_zerop (len))
7893 return omit_one_operand_loc (loc, type, dest, c);
7895 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
7896 return NULL_TREE;
7898 var = dest;
7899 STRIP_NOPS (var);
7900 if (TREE_CODE (var) != ADDR_EXPR)
7901 return NULL_TREE;
7903 var = TREE_OPERAND (var, 0);
7904 if (TREE_THIS_VOLATILE (var))
7905 return NULL_TREE;
7907 etype = TREE_TYPE (var);
7908 if (TREE_CODE (etype) == ARRAY_TYPE)
7909 etype = TREE_TYPE (etype);
7911 if (!INTEGRAL_TYPE_P (etype)
7912 && !POINTER_TYPE_P (etype))
7913 return NULL_TREE;
7915 if (! var_decl_component_p (var))
7916 return NULL_TREE;
7918 length = tree_low_cst (len, 1);
7919 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
7920 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
7921 return NULL_TREE;
7923 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
7924 return NULL_TREE;
7926 if (integer_zerop (c))
7927 cval = 0;
7928 else
7930 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
7931 return NULL_TREE;
7933 cval = TREE_INT_CST_LOW (c);
7934 cval &= 0xff;
7935 cval |= cval << 8;
7936 cval |= cval << 16;
7937 cval |= (cval << 31) << 1;
7940 ret = build_int_cst_type (etype, cval);
7941 var = build_fold_indirect_ref_loc (loc,
7942 fold_convert_loc (loc,
7943 build_pointer_type (etype),
7944 dest));
7945 ret = build2 (MODIFY_EXPR, etype, var, ret);
7946 if (ignore)
7947 return ret;
7949 return omit_one_operand_loc (loc, type, dest, ret);
7952 /* Fold function call to builtin memset. Return
7953 NULL_TREE if no simplification can be made. */
7955 static tree
7956 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
7958 if (! validate_arg (dest, POINTER_TYPE)
7959 || ! validate_arg (size, INTEGER_TYPE))
7960 return NULL_TREE;
7962 if (!ignore)
7963 return NULL_TREE;
7965 /* New argument list transforming bzero(ptr x, int y) to
7966 memset(ptr x, int 0, size_t y). This is done this way
7967 so that if it isn't expanded inline, we fallback to
7968 calling bzero instead of memset. */
7970 return fold_builtin_memset (loc, dest, integer_zero_node,
7971 fold_convert_loc (loc, sizetype, size),
7972 void_type_node, ignore);
7975 /* Fold function call to builtin mem{{,p}cpy,move}. Return
7976 NULL_TREE if no simplification can be made.
7977 If ENDP is 0, return DEST (like memcpy).
7978 If ENDP is 1, return DEST+LEN (like mempcpy).
7979 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
7980 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
7981 (memmove). */
7983 static tree
7984 fold_builtin_memory_op (location_t loc, tree dest, tree src,
7985 tree len, tree type, bool ignore, int endp)
7987 tree destvar, srcvar, expr;
7989 if (! validate_arg (dest, POINTER_TYPE)
7990 || ! validate_arg (src, POINTER_TYPE)
7991 || ! validate_arg (len, INTEGER_TYPE))
7992 return NULL_TREE;
7994 /* If the LEN parameter is zero, return DEST. */
7995 if (integer_zerop (len))
7996 return omit_one_operand_loc (loc, type, dest, src);
7998 /* If SRC and DEST are the same (and not volatile), return
7999 DEST{,+LEN,+LEN-1}. */
8000 if (operand_equal_p (src, dest, 0))
8001 expr = len;
8002 else
8004 tree srctype, desttype;
8005 unsigned int src_align, dest_align;
8006 tree off0;
8008 if (endp == 3)
8010 src_align = get_pointer_alignment (src);
8011 dest_align = get_pointer_alignment (dest);
8013 /* Both DEST and SRC must be pointer types.
8014 ??? This is what old code did. Is the testing for pointer types
8015 really mandatory?
8017 If either SRC is readonly or length is 1, we can use memcpy. */
8018 if (!dest_align || !src_align)
8019 return NULL_TREE;
8020 if (readonly_data_expr (src)
8021 || (host_integerp (len, 1)
8022 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8023 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
8025 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8026 if (!fn)
8027 return NULL_TREE;
8028 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8031 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8032 if (TREE_CODE (src) == ADDR_EXPR
8033 && TREE_CODE (dest) == ADDR_EXPR)
8035 tree src_base, dest_base, fn;
8036 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8037 HOST_WIDE_INT size = -1;
8038 HOST_WIDE_INT maxsize = -1;
8040 srcvar = TREE_OPERAND (src, 0);
8041 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8042 &size, &maxsize);
8043 destvar = TREE_OPERAND (dest, 0);
8044 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8045 &size, &maxsize);
8046 if (host_integerp (len, 1))
8047 maxsize = tree_low_cst (len, 1);
8048 else
8049 maxsize = -1;
8050 src_offset /= BITS_PER_UNIT;
8051 dest_offset /= BITS_PER_UNIT;
8052 if (SSA_VAR_P (src_base)
8053 && SSA_VAR_P (dest_base))
8055 if (operand_equal_p (src_base, dest_base, 0)
8056 && ranges_overlap_p (src_offset, maxsize,
8057 dest_offset, maxsize))
8058 return NULL_TREE;
8060 else if (TREE_CODE (src_base) == MEM_REF
8061 && TREE_CODE (dest_base) == MEM_REF)
8063 double_int off;
8064 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8065 TREE_OPERAND (dest_base, 0), 0))
8066 return NULL_TREE;
8067 off = double_int_add (mem_ref_offset (src_base),
8068 shwi_to_double_int (src_offset));
8069 if (!double_int_fits_in_shwi_p (off))
8070 return NULL_TREE;
8071 src_offset = off.low;
8072 off = double_int_add (mem_ref_offset (dest_base),
8073 shwi_to_double_int (dest_offset));
8074 if (!double_int_fits_in_shwi_p (off))
8075 return NULL_TREE;
8076 dest_offset = off.low;
8077 if (ranges_overlap_p (src_offset, maxsize,
8078 dest_offset, maxsize))
8079 return NULL_TREE;
8081 else
8082 return NULL_TREE;
8084 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8085 if (!fn)
8086 return NULL_TREE;
8087 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8090 /* If the destination and source do not alias optimize into
8091 memcpy as well. */
8092 if ((is_gimple_min_invariant (dest)
8093 || TREE_CODE (dest) == SSA_NAME)
8094 && (is_gimple_min_invariant (src)
8095 || TREE_CODE (src) == SSA_NAME))
8097 ao_ref destr, srcr;
8098 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8099 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8100 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8102 tree fn;
8103 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8104 if (!fn)
8105 return NULL_TREE;
8106 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8110 return NULL_TREE;
8113 if (!host_integerp (len, 0))
8114 return NULL_TREE;
8115 /* FIXME:
8116 This logic lose for arguments like (type *)malloc (sizeof (type)),
8117 since we strip the casts of up to VOID return value from malloc.
8118 Perhaps we ought to inherit type from non-VOID argument here? */
8119 STRIP_NOPS (src);
8120 STRIP_NOPS (dest);
8121 if (!POINTER_TYPE_P (TREE_TYPE (src))
8122 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8123 return NULL_TREE;
8124 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8125 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8127 tree tem = TREE_OPERAND (src, 0);
8128 STRIP_NOPS (tem);
8129 if (tem != TREE_OPERAND (src, 0))
8130 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8132 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8134 tree tem = TREE_OPERAND (dest, 0);
8135 STRIP_NOPS (tem);
8136 if (tem != TREE_OPERAND (dest, 0))
8137 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8139 srctype = TREE_TYPE (TREE_TYPE (src));
8140 if (TREE_CODE (srctype) == ARRAY_TYPE
8141 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8143 srctype = TREE_TYPE (srctype);
8144 STRIP_NOPS (src);
8145 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8147 desttype = TREE_TYPE (TREE_TYPE (dest));
8148 if (TREE_CODE (desttype) == ARRAY_TYPE
8149 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8151 desttype = TREE_TYPE (desttype);
8152 STRIP_NOPS (dest);
8153 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8155 if (TREE_ADDRESSABLE (srctype)
8156 || TREE_ADDRESSABLE (desttype))
8157 return NULL_TREE;
8159 src_align = get_pointer_alignment (src);
8160 dest_align = get_pointer_alignment (dest);
8161 if (dest_align < TYPE_ALIGN (desttype)
8162 || src_align < TYPE_ALIGN (srctype))
8163 return NULL_TREE;
8165 if (!ignore)
8166 dest = builtin_save_expr (dest);
8168 /* Build accesses at offset zero with a ref-all character type. */
8169 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8170 ptr_mode, true), 0);
8172 destvar = dest;
8173 STRIP_NOPS (destvar);
8174 if (TREE_CODE (destvar) == ADDR_EXPR
8175 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8176 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8177 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8178 else
8179 destvar = NULL_TREE;
8181 srcvar = src;
8182 STRIP_NOPS (srcvar);
8183 if (TREE_CODE (srcvar) == ADDR_EXPR
8184 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8185 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8187 if (!destvar
8188 || src_align >= TYPE_ALIGN (desttype))
8189 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8190 srcvar, off0);
8191 else if (!STRICT_ALIGNMENT)
8193 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8194 src_align);
8195 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8197 else
8198 srcvar = NULL_TREE;
8200 else
8201 srcvar = NULL_TREE;
8203 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8204 return NULL_TREE;
8206 if (srcvar == NULL_TREE)
8208 STRIP_NOPS (src);
8209 if (src_align >= TYPE_ALIGN (desttype))
8210 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8211 else
8213 if (STRICT_ALIGNMENT)
8214 return NULL_TREE;
8215 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8216 src_align);
8217 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8220 else if (destvar == NULL_TREE)
8222 STRIP_NOPS (dest);
8223 if (dest_align >= TYPE_ALIGN (srctype))
8224 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8225 else
8227 if (STRICT_ALIGNMENT)
8228 return NULL_TREE;
8229 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8230 dest_align);
8231 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8235 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8238 if (ignore)
8239 return expr;
8241 if (endp == 0 || endp == 3)
8242 return omit_one_operand_loc (loc, type, dest, expr);
8244 if (expr == len)
8245 expr = NULL_TREE;
8247 if (endp == 2)
8248 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8249 ssize_int (1));
8251 dest = fold_build_pointer_plus_loc (loc, dest, len);
8252 dest = fold_convert_loc (loc, type, dest);
8253 if (expr)
8254 dest = omit_one_operand_loc (loc, type, dest, expr);
8255 return dest;
8258 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8259 If LEN is not NULL, it represents the length of the string to be
8260 copied. Return NULL_TREE if no simplification can be made. */
8262 tree
8263 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8265 tree fn;
8267 if (!validate_arg (dest, POINTER_TYPE)
8268 || !validate_arg (src, POINTER_TYPE))
8269 return NULL_TREE;
8271 /* If SRC and DEST are the same (and not volatile), return DEST. */
8272 if (operand_equal_p (src, dest, 0))
8273 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8275 if (optimize_function_for_size_p (cfun))
8276 return NULL_TREE;
8278 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8279 if (!fn)
8280 return NULL_TREE;
8282 if (!len)
8284 len = c_strlen (src, 1);
8285 if (! len || TREE_SIDE_EFFECTS (len))
8286 return NULL_TREE;
8289 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8290 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8291 build_call_expr_loc (loc, fn, 3, dest, src, len));
8294 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8295 Return NULL_TREE if no simplification can be made. */
8297 static tree
8298 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8300 tree fn, len, lenp1, call, type;
8302 if (!validate_arg (dest, POINTER_TYPE)
8303 || !validate_arg (src, POINTER_TYPE))
8304 return NULL_TREE;
8306 len = c_strlen (src, 1);
8307 if (!len
8308 || TREE_CODE (len) != INTEGER_CST)
8309 return NULL_TREE;
8311 if (optimize_function_for_size_p (cfun)
8312 /* If length is zero it's small enough. */
8313 && !integer_zerop (len))
8314 return NULL_TREE;
8316 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8317 if (!fn)
8318 return NULL_TREE;
8320 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8321 /* We use dest twice in building our expression. Save it from
8322 multiple expansions. */
8323 dest = builtin_save_expr (dest);
8324 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8326 type = TREE_TYPE (TREE_TYPE (fndecl));
8327 dest = fold_build_pointer_plus_loc (loc, dest, len);
8328 dest = fold_convert_loc (loc, type, dest);
8329 dest = omit_one_operand_loc (loc, type, dest, call);
8330 return dest;
8333 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8334 If SLEN is not NULL, it represents the length of the source string.
8335 Return NULL_TREE if no simplification can be made. */
8337 tree
8338 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8339 tree src, tree len, tree slen)
8341 tree fn;
8343 if (!validate_arg (dest, POINTER_TYPE)
8344 || !validate_arg (src, POINTER_TYPE)
8345 || !validate_arg (len, INTEGER_TYPE))
8346 return NULL_TREE;
8348 /* If the LEN parameter is zero, return DEST. */
8349 if (integer_zerop (len))
8350 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8352 /* We can't compare slen with len as constants below if len is not a
8353 constant. */
8354 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8355 return NULL_TREE;
8357 if (!slen)
8358 slen = c_strlen (src, 1);
8360 /* Now, we must be passed a constant src ptr parameter. */
8361 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8362 return NULL_TREE;
8364 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8366 /* We do not support simplification of this case, though we do
8367 support it when expanding trees into RTL. */
8368 /* FIXME: generate a call to __builtin_memset. */
8369 if (tree_int_cst_lt (slen, len))
8370 return NULL_TREE;
8372 /* OK transform into builtin memcpy. */
8373 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8374 if (!fn)
8375 return NULL_TREE;
8376 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8377 build_call_expr_loc (loc, fn, 3, dest, src, len));
8380 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8381 arguments to the call, and TYPE is its return type.
8382 Return NULL_TREE if no simplification can be made. */
8384 static tree
8385 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8387 if (!validate_arg (arg1, POINTER_TYPE)
8388 || !validate_arg (arg2, INTEGER_TYPE)
8389 || !validate_arg (len, INTEGER_TYPE))
8390 return NULL_TREE;
8391 else
8393 const char *p1;
8395 if (TREE_CODE (arg2) != INTEGER_CST
8396 || !host_integerp (len, 1))
8397 return NULL_TREE;
8399 p1 = c_getstr (arg1);
8400 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8402 char c;
8403 const char *r;
8404 tree tem;
8406 if (target_char_cast (arg2, &c))
8407 return NULL_TREE;
8409 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8411 if (r == NULL)
8412 return build_int_cst (TREE_TYPE (arg1), 0);
8414 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
8415 return fold_convert_loc (loc, type, tem);
8417 return NULL_TREE;
8421 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8422 Return NULL_TREE if no simplification can be made. */
8424 static tree
8425 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8427 const char *p1, *p2;
8429 if (!validate_arg (arg1, POINTER_TYPE)
8430 || !validate_arg (arg2, POINTER_TYPE)
8431 || !validate_arg (len, INTEGER_TYPE))
8432 return NULL_TREE;
8434 /* If the LEN parameter is zero, return zero. */
8435 if (integer_zerop (len))
8436 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8437 arg1, arg2);
8439 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8440 if (operand_equal_p (arg1, arg2, 0))
8441 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8443 p1 = c_getstr (arg1);
8444 p2 = c_getstr (arg2);
8446 /* If all arguments are constant, and the value of len is not greater
8447 than the lengths of arg1 and arg2, evaluate at compile-time. */
8448 if (host_integerp (len, 1) && p1 && p2
8449 && compare_tree_int (len, strlen (p1) + 1) <= 0
8450 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8452 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8454 if (r > 0)
8455 return integer_one_node;
8456 else if (r < 0)
8457 return integer_minus_one_node;
8458 else
8459 return integer_zero_node;
8462 /* If len parameter is one, return an expression corresponding to
8463 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8464 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8466 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8467 tree cst_uchar_ptr_node
8468 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8470 tree ind1
8471 = fold_convert_loc (loc, integer_type_node,
8472 build1 (INDIRECT_REF, cst_uchar_node,
8473 fold_convert_loc (loc,
8474 cst_uchar_ptr_node,
8475 arg1)));
8476 tree ind2
8477 = fold_convert_loc (loc, integer_type_node,
8478 build1 (INDIRECT_REF, cst_uchar_node,
8479 fold_convert_loc (loc,
8480 cst_uchar_ptr_node,
8481 arg2)));
8482 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8485 return NULL_TREE;
8488 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8489 Return NULL_TREE if no simplification can be made. */
8491 static tree
8492 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8494 const char *p1, *p2;
8496 if (!validate_arg (arg1, POINTER_TYPE)
8497 || !validate_arg (arg2, POINTER_TYPE))
8498 return NULL_TREE;
8500 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8501 if (operand_equal_p (arg1, arg2, 0))
8502 return integer_zero_node;
8504 p1 = c_getstr (arg1);
8505 p2 = c_getstr (arg2);
8507 if (p1 && p2)
8509 const int i = strcmp (p1, p2);
8510 if (i < 0)
8511 return integer_minus_one_node;
8512 else if (i > 0)
8513 return integer_one_node;
8514 else
8515 return integer_zero_node;
8518 /* If the second arg is "", return *(const unsigned char*)arg1. */
8519 if (p2 && *p2 == '\0')
8521 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8522 tree cst_uchar_ptr_node
8523 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8525 return fold_convert_loc (loc, integer_type_node,
8526 build1 (INDIRECT_REF, cst_uchar_node,
8527 fold_convert_loc (loc,
8528 cst_uchar_ptr_node,
8529 arg1)));
8532 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8533 if (p1 && *p1 == '\0')
8535 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8536 tree cst_uchar_ptr_node
8537 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8539 tree temp
8540 = fold_convert_loc (loc, integer_type_node,
8541 build1 (INDIRECT_REF, cst_uchar_node,
8542 fold_convert_loc (loc,
8543 cst_uchar_ptr_node,
8544 arg2)));
8545 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8548 return NULL_TREE;
8551 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8552 Return NULL_TREE if no simplification can be made. */
8554 static tree
8555 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8557 const char *p1, *p2;
8559 if (!validate_arg (arg1, POINTER_TYPE)
8560 || !validate_arg (arg2, POINTER_TYPE)
8561 || !validate_arg (len, INTEGER_TYPE))
8562 return NULL_TREE;
8564 /* If the LEN parameter is zero, return zero. */
8565 if (integer_zerop (len))
8566 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8567 arg1, arg2);
8569 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8570 if (operand_equal_p (arg1, arg2, 0))
8571 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8573 p1 = c_getstr (arg1);
8574 p2 = c_getstr (arg2);
8576 if (host_integerp (len, 1) && p1 && p2)
8578 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8579 if (i > 0)
8580 return integer_one_node;
8581 else if (i < 0)
8582 return integer_minus_one_node;
8583 else
8584 return integer_zero_node;
8587 /* If the second arg is "", and the length is greater than zero,
8588 return *(const unsigned char*)arg1. */
8589 if (p2 && *p2 == '\0'
8590 && TREE_CODE (len) == INTEGER_CST
8591 && tree_int_cst_sgn (len) == 1)
8593 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8594 tree cst_uchar_ptr_node
8595 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8597 return fold_convert_loc (loc, integer_type_node,
8598 build1 (INDIRECT_REF, cst_uchar_node,
8599 fold_convert_loc (loc,
8600 cst_uchar_ptr_node,
8601 arg1)));
8604 /* If the first arg is "", and the length is greater than zero,
8605 return -*(const unsigned char*)arg2. */
8606 if (p1 && *p1 == '\0'
8607 && TREE_CODE (len) == INTEGER_CST
8608 && tree_int_cst_sgn (len) == 1)
8610 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8611 tree cst_uchar_ptr_node
8612 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8614 tree temp = fold_convert_loc (loc, integer_type_node,
8615 build1 (INDIRECT_REF, cst_uchar_node,
8616 fold_convert_loc (loc,
8617 cst_uchar_ptr_node,
8618 arg2)));
8619 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8622 /* If len parameter is one, return an expression corresponding to
8623 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8624 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8626 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8627 tree cst_uchar_ptr_node
8628 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8630 tree ind1 = fold_convert_loc (loc, integer_type_node,
8631 build1 (INDIRECT_REF, cst_uchar_node,
8632 fold_convert_loc (loc,
8633 cst_uchar_ptr_node,
8634 arg1)));
8635 tree ind2 = fold_convert_loc (loc, integer_type_node,
8636 build1 (INDIRECT_REF, cst_uchar_node,
8637 fold_convert_loc (loc,
8638 cst_uchar_ptr_node,
8639 arg2)));
8640 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8643 return NULL_TREE;
8646 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8647 ARG. Return NULL_TREE if no simplification can be made. */
8649 static tree
8650 fold_builtin_signbit (location_t loc, tree arg, tree type)
8652 if (!validate_arg (arg, REAL_TYPE))
8653 return NULL_TREE;
8655 /* If ARG is a compile-time constant, determine the result. */
8656 if (TREE_CODE (arg) == REAL_CST
8657 && !TREE_OVERFLOW (arg))
8659 REAL_VALUE_TYPE c;
8661 c = TREE_REAL_CST (arg);
8662 return (REAL_VALUE_NEGATIVE (c)
8663 ? build_one_cst (type)
8664 : build_zero_cst (type));
8667 /* If ARG is non-negative, the result is always zero. */
8668 if (tree_expr_nonnegative_p (arg))
8669 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8671 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8672 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8673 return fold_convert (type,
8674 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8675 build_real (TREE_TYPE (arg), dconst0)));
8677 return NULL_TREE;
8680 /* Fold function call to builtin copysign, copysignf or copysignl with
8681 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8682 be made. */
8684 static tree
8685 fold_builtin_copysign (location_t loc, tree fndecl,
8686 tree arg1, tree arg2, tree type)
8688 tree tem;
8690 if (!validate_arg (arg1, REAL_TYPE)
8691 || !validate_arg (arg2, REAL_TYPE))
8692 return NULL_TREE;
8694 /* copysign(X,X) is X. */
8695 if (operand_equal_p (arg1, arg2, 0))
8696 return fold_convert_loc (loc, type, arg1);
8698 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8699 if (TREE_CODE (arg1) == REAL_CST
8700 && TREE_CODE (arg2) == REAL_CST
8701 && !TREE_OVERFLOW (arg1)
8702 && !TREE_OVERFLOW (arg2))
8704 REAL_VALUE_TYPE c1, c2;
8706 c1 = TREE_REAL_CST (arg1);
8707 c2 = TREE_REAL_CST (arg2);
8708 /* c1.sign := c2.sign. */
8709 real_copysign (&c1, &c2);
8710 return build_real (type, c1);
8713 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8714 Remember to evaluate Y for side-effects. */
8715 if (tree_expr_nonnegative_p (arg2))
8716 return omit_one_operand_loc (loc, type,
8717 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8718 arg2);
8720 /* Strip sign changing operations for the first argument. */
8721 tem = fold_strip_sign_ops (arg1);
8722 if (tem)
8723 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8725 return NULL_TREE;
8728 /* Fold a call to builtin isascii with argument ARG. */
8730 static tree
8731 fold_builtin_isascii (location_t loc, tree arg)
8733 if (!validate_arg (arg, INTEGER_TYPE))
8734 return NULL_TREE;
8735 else
8737 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8738 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8739 build_int_cst (integer_type_node,
8740 ~ (unsigned HOST_WIDE_INT) 0x7f));
8741 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8742 arg, integer_zero_node);
8746 /* Fold a call to builtin toascii with argument ARG. */
8748 static tree
8749 fold_builtin_toascii (location_t loc, tree arg)
8751 if (!validate_arg (arg, INTEGER_TYPE))
8752 return NULL_TREE;
8754 /* Transform toascii(c) -> (c & 0x7f). */
8755 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8756 build_int_cst (integer_type_node, 0x7f));
8759 /* Fold a call to builtin isdigit with argument ARG. */
8761 static tree
8762 fold_builtin_isdigit (location_t loc, tree arg)
8764 if (!validate_arg (arg, INTEGER_TYPE))
8765 return NULL_TREE;
8766 else
8768 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8769 /* According to the C standard, isdigit is unaffected by locale.
8770 However, it definitely is affected by the target character set. */
8771 unsigned HOST_WIDE_INT target_digit0
8772 = lang_hooks.to_target_charset ('0');
8774 if (target_digit0 == 0)
8775 return NULL_TREE;
8777 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8778 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8779 build_int_cst (unsigned_type_node, target_digit0));
8780 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8781 build_int_cst (unsigned_type_node, 9));
8785 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8787 static tree
8788 fold_builtin_fabs (location_t loc, tree arg, tree type)
8790 if (!validate_arg (arg, REAL_TYPE))
8791 return NULL_TREE;
8793 arg = fold_convert_loc (loc, type, arg);
8794 if (TREE_CODE (arg) == REAL_CST)
8795 return fold_abs_const (arg, type);
8796 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8799 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8801 static tree
8802 fold_builtin_abs (location_t loc, tree arg, tree type)
8804 if (!validate_arg (arg, INTEGER_TYPE))
8805 return NULL_TREE;
8807 arg = fold_convert_loc (loc, type, arg);
8808 if (TREE_CODE (arg) == INTEGER_CST)
8809 return fold_abs_const (arg, type);
8810 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8813 /* Fold a fma operation with arguments ARG[012]. */
8815 tree
8816 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8817 tree type, tree arg0, tree arg1, tree arg2)
8819 if (TREE_CODE (arg0) == REAL_CST
8820 && TREE_CODE (arg1) == REAL_CST
8821 && TREE_CODE (arg2) == REAL_CST)
8822 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8824 return NULL_TREE;
8827 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8829 static tree
8830 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8832 if (validate_arg (arg0, REAL_TYPE)
8833 && validate_arg(arg1, REAL_TYPE)
8834 && validate_arg(arg2, REAL_TYPE))
8836 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8837 if (tem)
8838 return tem;
8840 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8841 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8842 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8844 return NULL_TREE;
8847 /* Fold a call to builtin fmin or fmax. */
8849 static tree
8850 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8851 tree type, bool max)
8853 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8855 /* Calculate the result when the argument is a constant. */
8856 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8858 if (res)
8859 return res;
8861 /* If either argument is NaN, return the other one. Avoid the
8862 transformation if we get (and honor) a signalling NaN. Using
8863 omit_one_operand() ensures we create a non-lvalue. */
8864 if (TREE_CODE (arg0) == REAL_CST
8865 && real_isnan (&TREE_REAL_CST (arg0))
8866 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8867 || ! TREE_REAL_CST (arg0).signalling))
8868 return omit_one_operand_loc (loc, type, arg1, arg0);
8869 if (TREE_CODE (arg1) == REAL_CST
8870 && real_isnan (&TREE_REAL_CST (arg1))
8871 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8872 || ! TREE_REAL_CST (arg1).signalling))
8873 return omit_one_operand_loc (loc, type, arg0, arg1);
8875 /* Transform fmin/fmax(x,x) -> x. */
8876 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8877 return omit_one_operand_loc (loc, type, arg0, arg1);
8879 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8880 functions to return the numeric arg if the other one is NaN.
8881 These tree codes don't honor that, so only transform if
8882 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8883 handled, so we don't have to worry about it either. */
8884 if (flag_finite_math_only)
8885 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8886 fold_convert_loc (loc, type, arg0),
8887 fold_convert_loc (loc, type, arg1));
8889 return NULL_TREE;
8892 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8894 static tree
8895 fold_builtin_carg (location_t loc, tree arg, tree type)
8897 if (validate_arg (arg, COMPLEX_TYPE)
8898 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8900 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8902 if (atan2_fn)
8904 tree new_arg = builtin_save_expr (arg);
8905 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8906 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8907 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8911 return NULL_TREE;
8914 /* Fold a call to builtin logb/ilogb. */
8916 static tree
8917 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8919 if (! validate_arg (arg, REAL_TYPE))
8920 return NULL_TREE;
8922 STRIP_NOPS (arg);
8924 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8926 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8928 switch (value->cl)
8930 case rvc_nan:
8931 case rvc_inf:
8932 /* If arg is Inf or NaN and we're logb, return it. */
8933 if (TREE_CODE (rettype) == REAL_TYPE)
8934 return fold_convert_loc (loc, rettype, arg);
8935 /* Fall through... */
8936 case rvc_zero:
8937 /* Zero may set errno and/or raise an exception for logb, also
8938 for ilogb we don't know FP_ILOGB0. */
8939 return NULL_TREE;
8940 case rvc_normal:
8941 /* For normal numbers, proceed iff radix == 2. In GCC,
8942 normalized significands are in the range [0.5, 1.0). We
8943 want the exponent as if they were [1.0, 2.0) so get the
8944 exponent and subtract 1. */
8945 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8946 return fold_convert_loc (loc, rettype,
8947 build_int_cst (integer_type_node,
8948 REAL_EXP (value)-1));
8949 break;
8953 return NULL_TREE;
8956 /* Fold a call to builtin significand, if radix == 2. */
8958 static tree
8959 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8961 if (! validate_arg (arg, REAL_TYPE))
8962 return NULL_TREE;
8964 STRIP_NOPS (arg);
8966 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8968 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8970 switch (value->cl)
8972 case rvc_zero:
8973 case rvc_nan:
8974 case rvc_inf:
8975 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8976 return fold_convert_loc (loc, rettype, arg);
8977 case rvc_normal:
8978 /* For normal numbers, proceed iff radix == 2. */
8979 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8981 REAL_VALUE_TYPE result = *value;
8982 /* In GCC, normalized significands are in the range [0.5,
8983 1.0). We want them to be [1.0, 2.0) so set the
8984 exponent to 1. */
8985 SET_REAL_EXP (&result, 1);
8986 return build_real (rettype, result);
8988 break;
8992 return NULL_TREE;
8995 /* Fold a call to builtin frexp, we can assume the base is 2. */
8997 static tree
8998 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9000 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9001 return NULL_TREE;
9003 STRIP_NOPS (arg0);
9005 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9006 return NULL_TREE;
9008 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9010 /* Proceed if a valid pointer type was passed in. */
9011 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9013 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9014 tree frac, exp;
9016 switch (value->cl)
9018 case rvc_zero:
9019 /* For +-0, return (*exp = 0, +-0). */
9020 exp = integer_zero_node;
9021 frac = arg0;
9022 break;
9023 case rvc_nan:
9024 case rvc_inf:
9025 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9026 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9027 case rvc_normal:
9029 /* Since the frexp function always expects base 2, and in
9030 GCC normalized significands are already in the range
9031 [0.5, 1.0), we have exactly what frexp wants. */
9032 REAL_VALUE_TYPE frac_rvt = *value;
9033 SET_REAL_EXP (&frac_rvt, 0);
9034 frac = build_real (rettype, frac_rvt);
9035 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9037 break;
9038 default:
9039 gcc_unreachable ();
9042 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9043 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9044 TREE_SIDE_EFFECTS (arg1) = 1;
9045 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9048 return NULL_TREE;
9051 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9052 then we can assume the base is two. If it's false, then we have to
9053 check the mode of the TYPE parameter in certain cases. */
9055 static tree
9056 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9057 tree type, bool ldexp)
9059 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9061 STRIP_NOPS (arg0);
9062 STRIP_NOPS (arg1);
9064 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9065 if (real_zerop (arg0) || integer_zerop (arg1)
9066 || (TREE_CODE (arg0) == REAL_CST
9067 && !real_isfinite (&TREE_REAL_CST (arg0))))
9068 return omit_one_operand_loc (loc, type, arg0, arg1);
9070 /* If both arguments are constant, then try to evaluate it. */
9071 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9072 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9073 && host_integerp (arg1, 0))
9075 /* Bound the maximum adjustment to twice the range of the
9076 mode's valid exponents. Use abs to ensure the range is
9077 positive as a sanity check. */
9078 const long max_exp_adj = 2 *
9079 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9080 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9082 /* Get the user-requested adjustment. */
9083 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9085 /* The requested adjustment must be inside this range. This
9086 is a preliminary cap to avoid things like overflow, we
9087 may still fail to compute the result for other reasons. */
9088 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9090 REAL_VALUE_TYPE initial_result;
9092 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9094 /* Ensure we didn't overflow. */
9095 if (! real_isinf (&initial_result))
9097 const REAL_VALUE_TYPE trunc_result
9098 = real_value_truncate (TYPE_MODE (type), initial_result);
9100 /* Only proceed if the target mode can hold the
9101 resulting value. */
9102 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9103 return build_real (type, trunc_result);
9109 return NULL_TREE;
9112 /* Fold a call to builtin modf. */
9114 static tree
9115 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9117 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9118 return NULL_TREE;
9120 STRIP_NOPS (arg0);
9122 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9123 return NULL_TREE;
9125 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9127 /* Proceed if a valid pointer type was passed in. */
9128 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9130 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9131 REAL_VALUE_TYPE trunc, frac;
9133 switch (value->cl)
9135 case rvc_nan:
9136 case rvc_zero:
9137 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9138 trunc = frac = *value;
9139 break;
9140 case rvc_inf:
9141 /* For +-Inf, return (*arg1 = arg0, +-0). */
9142 frac = dconst0;
9143 frac.sign = value->sign;
9144 trunc = *value;
9145 break;
9146 case rvc_normal:
9147 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9148 real_trunc (&trunc, VOIDmode, value);
9149 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9150 /* If the original number was negative and already
9151 integral, then the fractional part is -0.0. */
9152 if (value->sign && frac.cl == rvc_zero)
9153 frac.sign = value->sign;
9154 break;
9157 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9158 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9159 build_real (rettype, trunc));
9160 TREE_SIDE_EFFECTS (arg1) = 1;
9161 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9162 build_real (rettype, frac));
9165 return NULL_TREE;
9168 /* Given a location LOC, an interclass builtin function decl FNDECL
9169 and its single argument ARG, return an folded expression computing
9170 the same, or NULL_TREE if we either couldn't or didn't want to fold
9171 (the latter happen if there's an RTL instruction available). */
9173 static tree
9174 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9176 enum machine_mode mode;
9178 if (!validate_arg (arg, REAL_TYPE))
9179 return NULL_TREE;
9181 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9182 return NULL_TREE;
9184 mode = TYPE_MODE (TREE_TYPE (arg));
9186 /* If there is no optab, try generic code. */
9187 switch (DECL_FUNCTION_CODE (fndecl))
9189 tree result;
9191 CASE_FLT_FN (BUILT_IN_ISINF):
9193 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9194 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9195 tree const type = TREE_TYPE (arg);
9196 REAL_VALUE_TYPE r;
9197 char buf[128];
9199 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9200 real_from_string (&r, buf);
9201 result = build_call_expr (isgr_fn, 2,
9202 fold_build1_loc (loc, ABS_EXPR, type, arg),
9203 build_real (type, r));
9204 return result;
9206 CASE_FLT_FN (BUILT_IN_FINITE):
9207 case BUILT_IN_ISFINITE:
9209 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9210 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9211 tree const type = TREE_TYPE (arg);
9212 REAL_VALUE_TYPE r;
9213 char buf[128];
9215 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9216 real_from_string (&r, buf);
9217 result = build_call_expr (isle_fn, 2,
9218 fold_build1_loc (loc, ABS_EXPR, type, arg),
9219 build_real (type, r));
9220 /*result = fold_build2_loc (loc, UNGT_EXPR,
9221 TREE_TYPE (TREE_TYPE (fndecl)),
9222 fold_build1_loc (loc, ABS_EXPR, type, arg),
9223 build_real (type, r));
9224 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9225 TREE_TYPE (TREE_TYPE (fndecl)),
9226 result);*/
9227 return result;
9229 case BUILT_IN_ISNORMAL:
9231 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9232 islessequal(fabs(x),DBL_MAX). */
9233 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9234 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9235 tree const type = TREE_TYPE (arg);
9236 REAL_VALUE_TYPE rmax, rmin;
9237 char buf[128];
9239 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9240 real_from_string (&rmax, buf);
9241 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9242 real_from_string (&rmin, buf);
9243 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9244 result = build_call_expr (isle_fn, 2, arg,
9245 build_real (type, rmax));
9246 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9247 build_call_expr (isge_fn, 2, arg,
9248 build_real (type, rmin)));
9249 return result;
9251 default:
9252 break;
9255 return NULL_TREE;
9258 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9259 ARG is the argument for the call. */
9261 static tree
9262 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9264 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9265 REAL_VALUE_TYPE r;
9267 if (!validate_arg (arg, REAL_TYPE))
9268 return NULL_TREE;
9270 switch (builtin_index)
9272 case BUILT_IN_ISINF:
9273 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9274 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9276 if (TREE_CODE (arg) == REAL_CST)
9278 r = TREE_REAL_CST (arg);
9279 if (real_isinf (&r))
9280 return real_compare (GT_EXPR, &r, &dconst0)
9281 ? integer_one_node : integer_minus_one_node;
9282 else
9283 return integer_zero_node;
9286 return NULL_TREE;
9288 case BUILT_IN_ISINF_SIGN:
9290 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9291 /* In a boolean context, GCC will fold the inner COND_EXPR to
9292 1. So e.g. "if (isinf_sign(x))" would be folded to just
9293 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9294 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9295 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9296 tree tmp = NULL_TREE;
9298 arg = builtin_save_expr (arg);
9300 if (signbit_fn && isinf_fn)
9302 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9303 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9305 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9306 signbit_call, integer_zero_node);
9307 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9308 isinf_call, integer_zero_node);
9310 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9311 integer_minus_one_node, integer_one_node);
9312 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9313 isinf_call, tmp,
9314 integer_zero_node);
9317 return tmp;
9320 case BUILT_IN_ISFINITE:
9321 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9322 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9323 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9325 if (TREE_CODE (arg) == REAL_CST)
9327 r = TREE_REAL_CST (arg);
9328 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9331 return NULL_TREE;
9333 case BUILT_IN_ISNAN:
9334 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9335 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9337 if (TREE_CODE (arg) == REAL_CST)
9339 r = TREE_REAL_CST (arg);
9340 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9343 arg = builtin_save_expr (arg);
9344 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9346 default:
9347 gcc_unreachable ();
9351 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9352 This builtin will generate code to return the appropriate floating
9353 point classification depending on the value of the floating point
9354 number passed in. The possible return values must be supplied as
9355 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9356 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9357 one floating point argument which is "type generic". */
9359 static tree
9360 fold_builtin_fpclassify (location_t loc, tree exp)
9362 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9363 arg, type, res, tmp;
9364 enum machine_mode mode;
9365 REAL_VALUE_TYPE r;
9366 char buf[128];
9368 /* Verify the required arguments in the original call. */
9369 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9370 INTEGER_TYPE, INTEGER_TYPE,
9371 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9372 return NULL_TREE;
9374 fp_nan = CALL_EXPR_ARG (exp, 0);
9375 fp_infinite = CALL_EXPR_ARG (exp, 1);
9376 fp_normal = CALL_EXPR_ARG (exp, 2);
9377 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9378 fp_zero = CALL_EXPR_ARG (exp, 4);
9379 arg = CALL_EXPR_ARG (exp, 5);
9380 type = TREE_TYPE (arg);
9381 mode = TYPE_MODE (type);
9382 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9384 /* fpclassify(x) ->
9385 isnan(x) ? FP_NAN :
9386 (fabs(x) == Inf ? FP_INFINITE :
9387 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9388 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9390 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9391 build_real (type, dconst0));
9392 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9393 tmp, fp_zero, fp_subnormal);
9395 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9396 real_from_string (&r, buf);
9397 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9398 arg, build_real (type, r));
9399 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9401 if (HONOR_INFINITIES (mode))
9403 real_inf (&r);
9404 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9405 build_real (type, r));
9406 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9407 fp_infinite, res);
9410 if (HONOR_NANS (mode))
9412 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9413 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9416 return res;
9419 /* Fold a call to an unordered comparison function such as
9420 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9421 being called and ARG0 and ARG1 are the arguments for the call.
9422 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9423 the opposite of the desired result. UNORDERED_CODE is used
9424 for modes that can hold NaNs and ORDERED_CODE is used for
9425 the rest. */
9427 static tree
9428 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9429 enum tree_code unordered_code,
9430 enum tree_code ordered_code)
9432 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9433 enum tree_code code;
9434 tree type0, type1;
9435 enum tree_code code0, code1;
9436 tree cmp_type = NULL_TREE;
9438 type0 = TREE_TYPE (arg0);
9439 type1 = TREE_TYPE (arg1);
9441 code0 = TREE_CODE (type0);
9442 code1 = TREE_CODE (type1);
9444 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9445 /* Choose the wider of two real types. */
9446 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9447 ? type0 : type1;
9448 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9449 cmp_type = type0;
9450 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9451 cmp_type = type1;
9453 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9454 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9456 if (unordered_code == UNORDERED_EXPR)
9458 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9459 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9460 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9463 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9464 : ordered_code;
9465 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9466 fold_build2_loc (loc, code, type, arg0, arg1));
9469 /* Fold a call to built-in function FNDECL with 0 arguments.
9470 IGNORE is true if the result of the function call is ignored. This
9471 function returns NULL_TREE if no simplification was possible. */
9473 static tree
9474 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9476 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9477 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9478 switch (fcode)
9480 CASE_FLT_FN (BUILT_IN_INF):
9481 case BUILT_IN_INFD32:
9482 case BUILT_IN_INFD64:
9483 case BUILT_IN_INFD128:
9484 return fold_builtin_inf (loc, type, true);
9486 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9487 return fold_builtin_inf (loc, type, false);
9489 case BUILT_IN_CLASSIFY_TYPE:
9490 return fold_builtin_classify_type (NULL_TREE);
9492 default:
9493 break;
9495 return NULL_TREE;
9498 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9499 IGNORE is true if the result of the function call is ignored. This
9500 function returns NULL_TREE if no simplification was possible. */
9502 static tree
9503 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9505 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9506 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9507 switch (fcode)
9509 case BUILT_IN_CONSTANT_P:
9511 tree val = fold_builtin_constant_p (arg0);
9513 /* Gimplification will pull the CALL_EXPR for the builtin out of
9514 an if condition. When not optimizing, we'll not CSE it back.
9515 To avoid link error types of regressions, return false now. */
9516 if (!val && !optimize)
9517 val = integer_zero_node;
9519 return val;
9522 case BUILT_IN_CLASSIFY_TYPE:
9523 return fold_builtin_classify_type (arg0);
9525 case BUILT_IN_STRLEN:
9526 return fold_builtin_strlen (loc, type, arg0);
9528 CASE_FLT_FN (BUILT_IN_FABS):
9529 return fold_builtin_fabs (loc, arg0, type);
9531 case BUILT_IN_ABS:
9532 case BUILT_IN_LABS:
9533 case BUILT_IN_LLABS:
9534 case BUILT_IN_IMAXABS:
9535 return fold_builtin_abs (loc, arg0, type);
9537 CASE_FLT_FN (BUILT_IN_CONJ):
9538 if (validate_arg (arg0, COMPLEX_TYPE)
9539 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9540 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9541 break;
9543 CASE_FLT_FN (BUILT_IN_CREAL):
9544 if (validate_arg (arg0, COMPLEX_TYPE)
9545 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9546 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9547 break;
9549 CASE_FLT_FN (BUILT_IN_CIMAG):
9550 if (validate_arg (arg0, COMPLEX_TYPE)
9551 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9552 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9553 break;
9555 CASE_FLT_FN (BUILT_IN_CCOS):
9556 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9558 CASE_FLT_FN (BUILT_IN_CCOSH):
9559 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9561 CASE_FLT_FN (BUILT_IN_CPROJ):
9562 return fold_builtin_cproj(loc, arg0, type);
9564 CASE_FLT_FN (BUILT_IN_CSIN):
9565 if (validate_arg (arg0, COMPLEX_TYPE)
9566 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9567 return do_mpc_arg1 (arg0, type, mpc_sin);
9568 break;
9570 CASE_FLT_FN (BUILT_IN_CSINH):
9571 if (validate_arg (arg0, COMPLEX_TYPE)
9572 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9573 return do_mpc_arg1 (arg0, type, mpc_sinh);
9574 break;
9576 CASE_FLT_FN (BUILT_IN_CTAN):
9577 if (validate_arg (arg0, COMPLEX_TYPE)
9578 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9579 return do_mpc_arg1 (arg0, type, mpc_tan);
9580 break;
9582 CASE_FLT_FN (BUILT_IN_CTANH):
9583 if (validate_arg (arg0, COMPLEX_TYPE)
9584 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9585 return do_mpc_arg1 (arg0, type, mpc_tanh);
9586 break;
9588 CASE_FLT_FN (BUILT_IN_CLOG):
9589 if (validate_arg (arg0, COMPLEX_TYPE)
9590 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9591 return do_mpc_arg1 (arg0, type, mpc_log);
9592 break;
9594 CASE_FLT_FN (BUILT_IN_CSQRT):
9595 if (validate_arg (arg0, COMPLEX_TYPE)
9596 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9597 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9598 break;
9600 CASE_FLT_FN (BUILT_IN_CASIN):
9601 if (validate_arg (arg0, COMPLEX_TYPE)
9602 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9603 return do_mpc_arg1 (arg0, type, mpc_asin);
9604 break;
9606 CASE_FLT_FN (BUILT_IN_CACOS):
9607 if (validate_arg (arg0, COMPLEX_TYPE)
9608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9609 return do_mpc_arg1 (arg0, type, mpc_acos);
9610 break;
9612 CASE_FLT_FN (BUILT_IN_CATAN):
9613 if (validate_arg (arg0, COMPLEX_TYPE)
9614 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9615 return do_mpc_arg1 (arg0, type, mpc_atan);
9616 break;
9618 CASE_FLT_FN (BUILT_IN_CASINH):
9619 if (validate_arg (arg0, COMPLEX_TYPE)
9620 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9621 return do_mpc_arg1 (arg0, type, mpc_asinh);
9622 break;
9624 CASE_FLT_FN (BUILT_IN_CACOSH):
9625 if (validate_arg (arg0, COMPLEX_TYPE)
9626 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9627 return do_mpc_arg1 (arg0, type, mpc_acosh);
9628 break;
9630 CASE_FLT_FN (BUILT_IN_CATANH):
9631 if (validate_arg (arg0, COMPLEX_TYPE)
9632 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9633 return do_mpc_arg1 (arg0, type, mpc_atanh);
9634 break;
9636 CASE_FLT_FN (BUILT_IN_CABS):
9637 return fold_builtin_cabs (loc, arg0, type, fndecl);
9639 CASE_FLT_FN (BUILT_IN_CARG):
9640 return fold_builtin_carg (loc, arg0, type);
9642 CASE_FLT_FN (BUILT_IN_SQRT):
9643 return fold_builtin_sqrt (loc, arg0, type);
9645 CASE_FLT_FN (BUILT_IN_CBRT):
9646 return fold_builtin_cbrt (loc, arg0, type);
9648 CASE_FLT_FN (BUILT_IN_ASIN):
9649 if (validate_arg (arg0, REAL_TYPE))
9650 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9651 &dconstm1, &dconst1, true);
9652 break;
9654 CASE_FLT_FN (BUILT_IN_ACOS):
9655 if (validate_arg (arg0, REAL_TYPE))
9656 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9657 &dconstm1, &dconst1, true);
9658 break;
9660 CASE_FLT_FN (BUILT_IN_ATAN):
9661 if (validate_arg (arg0, REAL_TYPE))
9662 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9663 break;
9665 CASE_FLT_FN (BUILT_IN_ASINH):
9666 if (validate_arg (arg0, REAL_TYPE))
9667 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9668 break;
9670 CASE_FLT_FN (BUILT_IN_ACOSH):
9671 if (validate_arg (arg0, REAL_TYPE))
9672 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9673 &dconst1, NULL, true);
9674 break;
9676 CASE_FLT_FN (BUILT_IN_ATANH):
9677 if (validate_arg (arg0, REAL_TYPE))
9678 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9679 &dconstm1, &dconst1, false);
9680 break;
9682 CASE_FLT_FN (BUILT_IN_SIN):
9683 if (validate_arg (arg0, REAL_TYPE))
9684 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9685 break;
9687 CASE_FLT_FN (BUILT_IN_COS):
9688 return fold_builtin_cos (loc, arg0, type, fndecl);
9690 CASE_FLT_FN (BUILT_IN_TAN):
9691 return fold_builtin_tan (arg0, type);
9693 CASE_FLT_FN (BUILT_IN_CEXP):
9694 return fold_builtin_cexp (loc, arg0, type);
9696 CASE_FLT_FN (BUILT_IN_CEXPI):
9697 if (validate_arg (arg0, REAL_TYPE))
9698 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9699 break;
9701 CASE_FLT_FN (BUILT_IN_SINH):
9702 if (validate_arg (arg0, REAL_TYPE))
9703 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9704 break;
9706 CASE_FLT_FN (BUILT_IN_COSH):
9707 return fold_builtin_cosh (loc, arg0, type, fndecl);
9709 CASE_FLT_FN (BUILT_IN_TANH):
9710 if (validate_arg (arg0, REAL_TYPE))
9711 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9712 break;
9714 CASE_FLT_FN (BUILT_IN_ERF):
9715 if (validate_arg (arg0, REAL_TYPE))
9716 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9717 break;
9719 CASE_FLT_FN (BUILT_IN_ERFC):
9720 if (validate_arg (arg0, REAL_TYPE))
9721 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9722 break;
9724 CASE_FLT_FN (BUILT_IN_TGAMMA):
9725 if (validate_arg (arg0, REAL_TYPE))
9726 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9727 break;
9729 CASE_FLT_FN (BUILT_IN_EXP):
9730 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9732 CASE_FLT_FN (BUILT_IN_EXP2):
9733 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9735 CASE_FLT_FN (BUILT_IN_EXP10):
9736 CASE_FLT_FN (BUILT_IN_POW10):
9737 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9739 CASE_FLT_FN (BUILT_IN_EXPM1):
9740 if (validate_arg (arg0, REAL_TYPE))
9741 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9742 break;
9744 CASE_FLT_FN (BUILT_IN_LOG):
9745 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9747 CASE_FLT_FN (BUILT_IN_LOG2):
9748 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9750 CASE_FLT_FN (BUILT_IN_LOG10):
9751 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9753 CASE_FLT_FN (BUILT_IN_LOG1P):
9754 if (validate_arg (arg0, REAL_TYPE))
9755 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9756 &dconstm1, NULL, false);
9757 break;
9759 CASE_FLT_FN (BUILT_IN_J0):
9760 if (validate_arg (arg0, REAL_TYPE))
9761 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9762 NULL, NULL, 0);
9763 break;
9765 CASE_FLT_FN (BUILT_IN_J1):
9766 if (validate_arg (arg0, REAL_TYPE))
9767 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9768 NULL, NULL, 0);
9769 break;
9771 CASE_FLT_FN (BUILT_IN_Y0):
9772 if (validate_arg (arg0, REAL_TYPE))
9773 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9774 &dconst0, NULL, false);
9775 break;
9777 CASE_FLT_FN (BUILT_IN_Y1):
9778 if (validate_arg (arg0, REAL_TYPE))
9779 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9780 &dconst0, NULL, false);
9781 break;
9783 CASE_FLT_FN (BUILT_IN_NAN):
9784 case BUILT_IN_NAND32:
9785 case BUILT_IN_NAND64:
9786 case BUILT_IN_NAND128:
9787 return fold_builtin_nan (arg0, type, true);
9789 CASE_FLT_FN (BUILT_IN_NANS):
9790 return fold_builtin_nan (arg0, type, false);
9792 CASE_FLT_FN (BUILT_IN_FLOOR):
9793 return fold_builtin_floor (loc, fndecl, arg0);
9795 CASE_FLT_FN (BUILT_IN_CEIL):
9796 return fold_builtin_ceil (loc, fndecl, arg0);
9798 CASE_FLT_FN (BUILT_IN_TRUNC):
9799 return fold_builtin_trunc (loc, fndecl, arg0);
9801 CASE_FLT_FN (BUILT_IN_ROUND):
9802 return fold_builtin_round (loc, fndecl, arg0);
9804 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9805 CASE_FLT_FN (BUILT_IN_RINT):
9806 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9808 CASE_FLT_FN (BUILT_IN_ICEIL):
9809 CASE_FLT_FN (BUILT_IN_LCEIL):
9810 CASE_FLT_FN (BUILT_IN_LLCEIL):
9811 CASE_FLT_FN (BUILT_IN_LFLOOR):
9812 CASE_FLT_FN (BUILT_IN_IFLOOR):
9813 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9814 CASE_FLT_FN (BUILT_IN_IROUND):
9815 CASE_FLT_FN (BUILT_IN_LROUND):
9816 CASE_FLT_FN (BUILT_IN_LLROUND):
9817 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9819 CASE_FLT_FN (BUILT_IN_IRINT):
9820 CASE_FLT_FN (BUILT_IN_LRINT):
9821 CASE_FLT_FN (BUILT_IN_LLRINT):
9822 return fold_fixed_mathfn (loc, fndecl, arg0);
9824 case BUILT_IN_BSWAP32:
9825 case BUILT_IN_BSWAP64:
9826 return fold_builtin_bswap (fndecl, arg0);
9828 CASE_INT_FN (BUILT_IN_FFS):
9829 CASE_INT_FN (BUILT_IN_CLZ):
9830 CASE_INT_FN (BUILT_IN_CTZ):
9831 CASE_INT_FN (BUILT_IN_CLRSB):
9832 CASE_INT_FN (BUILT_IN_POPCOUNT):
9833 CASE_INT_FN (BUILT_IN_PARITY):
9834 return fold_builtin_bitop (fndecl, arg0);
9836 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9837 return fold_builtin_signbit (loc, arg0, type);
9839 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9840 return fold_builtin_significand (loc, arg0, type);
9842 CASE_FLT_FN (BUILT_IN_ILOGB):
9843 CASE_FLT_FN (BUILT_IN_LOGB):
9844 return fold_builtin_logb (loc, arg0, type);
9846 case BUILT_IN_ISASCII:
9847 return fold_builtin_isascii (loc, arg0);
9849 case BUILT_IN_TOASCII:
9850 return fold_builtin_toascii (loc, arg0);
9852 case BUILT_IN_ISDIGIT:
9853 return fold_builtin_isdigit (loc, arg0);
9855 CASE_FLT_FN (BUILT_IN_FINITE):
9856 case BUILT_IN_FINITED32:
9857 case BUILT_IN_FINITED64:
9858 case BUILT_IN_FINITED128:
9859 case BUILT_IN_ISFINITE:
9861 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9862 if (ret)
9863 return ret;
9864 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9867 CASE_FLT_FN (BUILT_IN_ISINF):
9868 case BUILT_IN_ISINFD32:
9869 case BUILT_IN_ISINFD64:
9870 case BUILT_IN_ISINFD128:
9872 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9873 if (ret)
9874 return ret;
9875 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9878 case BUILT_IN_ISNORMAL:
9879 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9881 case BUILT_IN_ISINF_SIGN:
9882 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9884 CASE_FLT_FN (BUILT_IN_ISNAN):
9885 case BUILT_IN_ISNAND32:
9886 case BUILT_IN_ISNAND64:
9887 case BUILT_IN_ISNAND128:
9888 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9890 case BUILT_IN_PRINTF:
9891 case BUILT_IN_PRINTF_UNLOCKED:
9892 case BUILT_IN_VPRINTF:
9893 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9895 case BUILT_IN_FREE:
9896 if (integer_zerop (arg0))
9897 return build_empty_stmt (loc);
9898 break;
9900 default:
9901 break;
9904 return NULL_TREE;
9908 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9909 IGNORE is true if the result of the function call is ignored. This
9910 function returns NULL_TREE if no simplification was possible. */
9912 static tree
9913 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9916 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9918 switch (fcode)
9920 CASE_FLT_FN (BUILT_IN_JN):
9921 if (validate_arg (arg0, INTEGER_TYPE)
9922 && validate_arg (arg1, REAL_TYPE))
9923 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9924 break;
9926 CASE_FLT_FN (BUILT_IN_YN):
9927 if (validate_arg (arg0, INTEGER_TYPE)
9928 && validate_arg (arg1, REAL_TYPE))
9929 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9930 &dconst0, false);
9931 break;
9933 CASE_FLT_FN (BUILT_IN_DREM):
9934 CASE_FLT_FN (BUILT_IN_REMAINDER):
9935 if (validate_arg (arg0, REAL_TYPE)
9936 && validate_arg(arg1, REAL_TYPE))
9937 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9938 break;
9940 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9941 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9942 if (validate_arg (arg0, REAL_TYPE)
9943 && validate_arg(arg1, POINTER_TYPE))
9944 return do_mpfr_lgamma_r (arg0, arg1, type);
9945 break;
9947 CASE_FLT_FN (BUILT_IN_ATAN2):
9948 if (validate_arg (arg0, REAL_TYPE)
9949 && validate_arg(arg1, REAL_TYPE))
9950 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9951 break;
9953 CASE_FLT_FN (BUILT_IN_FDIM):
9954 if (validate_arg (arg0, REAL_TYPE)
9955 && validate_arg(arg1, REAL_TYPE))
9956 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9957 break;
9959 CASE_FLT_FN (BUILT_IN_HYPOT):
9960 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9962 CASE_FLT_FN (BUILT_IN_CPOW):
9963 if (validate_arg (arg0, COMPLEX_TYPE)
9964 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9965 && validate_arg (arg1, COMPLEX_TYPE)
9966 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9967 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9968 break;
9970 CASE_FLT_FN (BUILT_IN_LDEXP):
9971 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9972 CASE_FLT_FN (BUILT_IN_SCALBN):
9973 CASE_FLT_FN (BUILT_IN_SCALBLN):
9974 return fold_builtin_load_exponent (loc, arg0, arg1,
9975 type, /*ldexp=*/false);
9977 CASE_FLT_FN (BUILT_IN_FREXP):
9978 return fold_builtin_frexp (loc, arg0, arg1, type);
9980 CASE_FLT_FN (BUILT_IN_MODF):
9981 return fold_builtin_modf (loc, arg0, arg1, type);
9983 case BUILT_IN_BZERO:
9984 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9986 case BUILT_IN_FPUTS:
9987 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9989 case BUILT_IN_FPUTS_UNLOCKED:
9990 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9992 case BUILT_IN_STRSTR:
9993 return fold_builtin_strstr (loc, arg0, arg1, type);
9995 case BUILT_IN_STRCAT:
9996 return fold_builtin_strcat (loc, arg0, arg1);
9998 case BUILT_IN_STRSPN:
9999 return fold_builtin_strspn (loc, arg0, arg1);
10001 case BUILT_IN_STRCSPN:
10002 return fold_builtin_strcspn (loc, arg0, arg1);
10004 case BUILT_IN_STRCHR:
10005 case BUILT_IN_INDEX:
10006 return fold_builtin_strchr (loc, arg0, arg1, type);
10008 case BUILT_IN_STRRCHR:
10009 case BUILT_IN_RINDEX:
10010 return fold_builtin_strrchr (loc, arg0, arg1, type);
10012 case BUILT_IN_STRCPY:
10013 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10015 case BUILT_IN_STPCPY:
10016 if (ignore)
10018 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10019 if (!fn)
10020 break;
10022 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10024 else
10025 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10026 break;
10028 case BUILT_IN_STRCMP:
10029 return fold_builtin_strcmp (loc, arg0, arg1);
10031 case BUILT_IN_STRPBRK:
10032 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10034 case BUILT_IN_EXPECT:
10035 return fold_builtin_expect (loc, arg0, arg1);
10037 CASE_FLT_FN (BUILT_IN_POW):
10038 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10040 CASE_FLT_FN (BUILT_IN_POWI):
10041 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10043 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10044 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10046 CASE_FLT_FN (BUILT_IN_FMIN):
10047 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10049 CASE_FLT_FN (BUILT_IN_FMAX):
10050 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10052 case BUILT_IN_ISGREATER:
10053 return fold_builtin_unordered_cmp (loc, fndecl,
10054 arg0, arg1, UNLE_EXPR, LE_EXPR);
10055 case BUILT_IN_ISGREATEREQUAL:
10056 return fold_builtin_unordered_cmp (loc, fndecl,
10057 arg0, arg1, UNLT_EXPR, LT_EXPR);
10058 case BUILT_IN_ISLESS:
10059 return fold_builtin_unordered_cmp (loc, fndecl,
10060 arg0, arg1, UNGE_EXPR, GE_EXPR);
10061 case BUILT_IN_ISLESSEQUAL:
10062 return fold_builtin_unordered_cmp (loc, fndecl,
10063 arg0, arg1, UNGT_EXPR, GT_EXPR);
10064 case BUILT_IN_ISLESSGREATER:
10065 return fold_builtin_unordered_cmp (loc, fndecl,
10066 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10067 case BUILT_IN_ISUNORDERED:
10068 return fold_builtin_unordered_cmp (loc, fndecl,
10069 arg0, arg1, UNORDERED_EXPR,
10070 NOP_EXPR);
10072 /* We do the folding for va_start in the expander. */
10073 case BUILT_IN_VA_START:
10074 break;
10076 case BUILT_IN_SPRINTF:
10077 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10079 case BUILT_IN_OBJECT_SIZE:
10080 return fold_builtin_object_size (arg0, arg1);
10082 case BUILT_IN_PRINTF:
10083 case BUILT_IN_PRINTF_UNLOCKED:
10084 case BUILT_IN_VPRINTF:
10085 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10087 case BUILT_IN_PRINTF_CHK:
10088 case BUILT_IN_VPRINTF_CHK:
10089 if (!validate_arg (arg0, INTEGER_TYPE)
10090 || TREE_SIDE_EFFECTS (arg0))
10091 return NULL_TREE;
10092 else
10093 return fold_builtin_printf (loc, fndecl,
10094 arg1, NULL_TREE, ignore, fcode);
10095 break;
10097 case BUILT_IN_FPRINTF:
10098 case BUILT_IN_FPRINTF_UNLOCKED:
10099 case BUILT_IN_VFPRINTF:
10100 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10101 ignore, fcode);
10103 default:
10104 break;
10106 return NULL_TREE;
10109 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10110 and ARG2. IGNORE is true if the result of the function call is ignored.
10111 This function returns NULL_TREE if no simplification was possible. */
10113 static tree
10114 fold_builtin_3 (location_t loc, tree fndecl,
10115 tree arg0, tree arg1, tree arg2, bool ignore)
10117 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10118 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10119 switch (fcode)
10122 CASE_FLT_FN (BUILT_IN_SINCOS):
10123 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10125 CASE_FLT_FN (BUILT_IN_FMA):
10126 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10127 break;
10129 CASE_FLT_FN (BUILT_IN_REMQUO):
10130 if (validate_arg (arg0, REAL_TYPE)
10131 && validate_arg(arg1, REAL_TYPE)
10132 && validate_arg(arg2, POINTER_TYPE))
10133 return do_mpfr_remquo (arg0, arg1, arg2);
10134 break;
10136 case BUILT_IN_MEMSET:
10137 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10139 case BUILT_IN_BCOPY:
10140 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10141 void_type_node, true, /*endp=*/3);
10143 case BUILT_IN_MEMCPY:
10144 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10145 type, ignore, /*endp=*/0);
10147 case BUILT_IN_MEMPCPY:
10148 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10149 type, ignore, /*endp=*/1);
10151 case BUILT_IN_MEMMOVE:
10152 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10153 type, ignore, /*endp=*/3);
10155 case BUILT_IN_STRNCAT:
10156 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10158 case BUILT_IN_STRNCPY:
10159 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10161 case BUILT_IN_STRNCMP:
10162 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10164 case BUILT_IN_MEMCHR:
10165 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10167 case BUILT_IN_BCMP:
10168 case BUILT_IN_MEMCMP:
10169 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10171 case BUILT_IN_SPRINTF:
10172 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10174 case BUILT_IN_SNPRINTF:
10175 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10177 case BUILT_IN_STRCPY_CHK:
10178 case BUILT_IN_STPCPY_CHK:
10179 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10180 ignore, fcode);
10182 case BUILT_IN_STRCAT_CHK:
10183 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10185 case BUILT_IN_PRINTF_CHK:
10186 case BUILT_IN_VPRINTF_CHK:
10187 if (!validate_arg (arg0, INTEGER_TYPE)
10188 || TREE_SIDE_EFFECTS (arg0))
10189 return NULL_TREE;
10190 else
10191 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10192 break;
10194 case BUILT_IN_FPRINTF:
10195 case BUILT_IN_FPRINTF_UNLOCKED:
10196 case BUILT_IN_VFPRINTF:
10197 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10198 ignore, fcode);
10200 case BUILT_IN_FPRINTF_CHK:
10201 case BUILT_IN_VFPRINTF_CHK:
10202 if (!validate_arg (arg1, INTEGER_TYPE)
10203 || TREE_SIDE_EFFECTS (arg1))
10204 return NULL_TREE;
10205 else
10206 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10207 ignore, fcode);
10209 default:
10210 break;
10212 return NULL_TREE;
10215 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10216 ARG2, and ARG3. IGNORE is true if the result of the function call is
10217 ignored. This function returns NULL_TREE if no simplification was
10218 possible. */
10220 static tree
10221 fold_builtin_4 (location_t loc, tree fndecl,
10222 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10224 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10226 switch (fcode)
10228 case BUILT_IN_MEMCPY_CHK:
10229 case BUILT_IN_MEMPCPY_CHK:
10230 case BUILT_IN_MEMMOVE_CHK:
10231 case BUILT_IN_MEMSET_CHK:
10232 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10233 NULL_TREE, ignore,
10234 DECL_FUNCTION_CODE (fndecl));
10236 case BUILT_IN_STRNCPY_CHK:
10237 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10239 case BUILT_IN_STRNCAT_CHK:
10240 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10242 case BUILT_IN_SNPRINTF:
10243 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10245 case BUILT_IN_FPRINTF_CHK:
10246 case BUILT_IN_VFPRINTF_CHK:
10247 if (!validate_arg (arg1, INTEGER_TYPE)
10248 || TREE_SIDE_EFFECTS (arg1))
10249 return NULL_TREE;
10250 else
10251 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10252 ignore, fcode);
10253 break;
10255 default:
10256 break;
10258 return NULL_TREE;
10261 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10262 arguments, where NARGS <= 4. IGNORE is true if the result of the
10263 function call is ignored. This function returns NULL_TREE if no
10264 simplification was possible. Note that this only folds builtins with
10265 fixed argument patterns. Foldings that do varargs-to-varargs
10266 transformations, or that match calls with more than 4 arguments,
10267 need to be handled with fold_builtin_varargs instead. */
10269 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10271 static tree
10272 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10274 tree ret = NULL_TREE;
10276 switch (nargs)
10278 case 0:
10279 ret = fold_builtin_0 (loc, fndecl, ignore);
10280 break;
10281 case 1:
10282 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10283 break;
10284 case 2:
10285 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10286 break;
10287 case 3:
10288 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10289 break;
10290 case 4:
10291 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10292 ignore);
10293 break;
10294 default:
10295 break;
10297 if (ret)
10299 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10300 SET_EXPR_LOCATION (ret, loc);
10301 TREE_NO_WARNING (ret) = 1;
10302 return ret;
10304 return NULL_TREE;
10307 /* Builtins with folding operations that operate on "..." arguments
10308 need special handling; we need to store the arguments in a convenient
10309 data structure before attempting any folding. Fortunately there are
10310 only a few builtins that fall into this category. FNDECL is the
10311 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10312 result of the function call is ignored. */
10314 static tree
10315 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10316 bool ignore ATTRIBUTE_UNUSED)
10318 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10319 tree ret = NULL_TREE;
10321 switch (fcode)
10323 case BUILT_IN_SPRINTF_CHK:
10324 case BUILT_IN_VSPRINTF_CHK:
10325 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10326 break;
10328 case BUILT_IN_SNPRINTF_CHK:
10329 case BUILT_IN_VSNPRINTF_CHK:
10330 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10331 break;
10333 case BUILT_IN_FPCLASSIFY:
10334 ret = fold_builtin_fpclassify (loc, exp);
10335 break;
10337 default:
10338 break;
10340 if (ret)
10342 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10343 SET_EXPR_LOCATION (ret, loc);
10344 TREE_NO_WARNING (ret) = 1;
10345 return ret;
10347 return NULL_TREE;
10350 /* Return true if FNDECL shouldn't be folded right now.
10351 If a built-in function has an inline attribute always_inline
10352 wrapper, defer folding it after always_inline functions have
10353 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10354 might not be performed. */
10356 static bool
10357 avoid_folding_inline_builtin (tree fndecl)
10359 return (DECL_DECLARED_INLINE_P (fndecl)
10360 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10361 && cfun
10362 && !cfun->always_inline_functions_inlined
10363 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10366 /* A wrapper function for builtin folding that prevents warnings for
10367 "statement without effect" and the like, caused by removing the
10368 call node earlier than the warning is generated. */
10370 tree
10371 fold_call_expr (location_t loc, tree exp, bool ignore)
10373 tree ret = NULL_TREE;
10374 tree fndecl = get_callee_fndecl (exp);
10375 if (fndecl
10376 && TREE_CODE (fndecl) == FUNCTION_DECL
10377 && DECL_BUILT_IN (fndecl)
10378 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10379 yet. Defer folding until we see all the arguments
10380 (after inlining). */
10381 && !CALL_EXPR_VA_ARG_PACK (exp))
10383 int nargs = call_expr_nargs (exp);
10385 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10386 instead last argument is __builtin_va_arg_pack (). Defer folding
10387 even in that case, until arguments are finalized. */
10388 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10390 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10391 if (fndecl2
10392 && TREE_CODE (fndecl2) == FUNCTION_DECL
10393 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10394 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10395 return NULL_TREE;
10398 if (avoid_folding_inline_builtin (fndecl))
10399 return NULL_TREE;
10401 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10402 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10403 CALL_EXPR_ARGP (exp), ignore);
10404 else
10406 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10408 tree *args = CALL_EXPR_ARGP (exp);
10409 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10411 if (!ret)
10412 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10413 if (ret)
10414 return ret;
10417 return NULL_TREE;
10420 /* Conveniently construct a function call expression. FNDECL names the
10421 function to be called and N arguments are passed in the array
10422 ARGARRAY. */
10424 tree
10425 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10427 tree fntype = TREE_TYPE (fndecl);
10428 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10430 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10433 /* Conveniently construct a function call expression. FNDECL names the
10434 function to be called and the arguments are passed in the vector
10435 VEC. */
10437 tree
10438 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10440 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10441 VEC_address (tree, vec));
10445 /* Conveniently construct a function call expression. FNDECL names the
10446 function to be called, N is the number of arguments, and the "..."
10447 parameters are the argument expressions. */
10449 tree
10450 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10452 va_list ap;
10453 tree *argarray = XALLOCAVEC (tree, n);
10454 int i;
10456 va_start (ap, n);
10457 for (i = 0; i < n; i++)
10458 argarray[i] = va_arg (ap, tree);
10459 va_end (ap);
10460 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10463 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10464 varargs macros aren't supported by all bootstrap compilers. */
10466 tree
10467 build_call_expr (tree fndecl, int n, ...)
10469 va_list ap;
10470 tree *argarray = XALLOCAVEC (tree, n);
10471 int i;
10473 va_start (ap, n);
10474 for (i = 0; i < n; i++)
10475 argarray[i] = va_arg (ap, tree);
10476 va_end (ap);
10477 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10480 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10481 N arguments are passed in the array ARGARRAY. */
10483 tree
10484 fold_builtin_call_array (location_t loc, tree type,
10485 tree fn,
10486 int n,
10487 tree *argarray)
10489 tree ret = NULL_TREE;
10490 tree exp;
10492 if (TREE_CODE (fn) == ADDR_EXPR)
10494 tree fndecl = TREE_OPERAND (fn, 0);
10495 if (TREE_CODE (fndecl) == FUNCTION_DECL
10496 && DECL_BUILT_IN (fndecl))
10498 /* If last argument is __builtin_va_arg_pack (), arguments to this
10499 function are not finalized yet. Defer folding until they are. */
10500 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10502 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10503 if (fndecl2
10504 && TREE_CODE (fndecl2) == FUNCTION_DECL
10505 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10506 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10507 return build_call_array_loc (loc, type, fn, n, argarray);
10509 if (avoid_folding_inline_builtin (fndecl))
10510 return build_call_array_loc (loc, type, fn, n, argarray);
10511 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10513 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10514 if (ret)
10515 return ret;
10517 return build_call_array_loc (loc, type, fn, n, argarray);
10519 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10521 /* First try the transformations that don't require consing up
10522 an exp. */
10523 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10524 if (ret)
10525 return ret;
10528 /* If we got this far, we need to build an exp. */
10529 exp = build_call_array_loc (loc, type, fn, n, argarray);
10530 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10531 return ret ? ret : exp;
10535 return build_call_array_loc (loc, type, fn, n, argarray);
10538 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10539 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10540 of arguments in ARGS to be omitted. OLDNARGS is the number of
10541 elements in ARGS. */
10543 static tree
10544 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10545 int skip, tree fndecl, int n, va_list newargs)
10547 int nargs = oldnargs - skip + n;
10548 tree *buffer;
10550 if (n > 0)
10552 int i, j;
10554 buffer = XALLOCAVEC (tree, nargs);
10555 for (i = 0; i < n; i++)
10556 buffer[i] = va_arg (newargs, tree);
10557 for (j = skip; j < oldnargs; j++, i++)
10558 buffer[i] = args[j];
10560 else
10561 buffer = args + skip;
10563 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10566 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10567 list ARGS along with N new arguments specified as the "..."
10568 parameters. SKIP is the number of arguments in ARGS to be omitted.
10569 OLDNARGS is the number of elements in ARGS. */
10571 static tree
10572 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10573 int skip, tree fndecl, int n, ...)
10575 va_list ap;
10576 tree t;
10578 va_start (ap, n);
10579 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10580 va_end (ap);
10582 return t;
10585 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10586 along with N new arguments specified as the "..." parameters. SKIP
10587 is the number of arguments in EXP to be omitted. This function is used
10588 to do varargs-to-varargs transformations. */
10590 static tree
10591 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10593 va_list ap;
10594 tree t;
10596 va_start (ap, n);
10597 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10598 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10599 va_end (ap);
10601 return t;
10604 /* Validate a single argument ARG against a tree code CODE representing
10605 a type. */
10607 static bool
10608 validate_arg (const_tree arg, enum tree_code code)
10610 if (!arg)
10611 return false;
10612 else if (code == POINTER_TYPE)
10613 return POINTER_TYPE_P (TREE_TYPE (arg));
10614 else if (code == INTEGER_TYPE)
10615 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10616 return code == TREE_CODE (TREE_TYPE (arg));
10619 /* This function validates the types of a function call argument list
10620 against a specified list of tree_codes. If the last specifier is a 0,
10621 that represents an ellipses, otherwise the last specifier must be a
10622 VOID_TYPE.
10624 This is the GIMPLE version of validate_arglist. Eventually we want to
10625 completely convert builtins.c to work from GIMPLEs and the tree based
10626 validate_arglist will then be removed. */
10628 bool
10629 validate_gimple_arglist (const_gimple call, ...)
10631 enum tree_code code;
10632 bool res = 0;
10633 va_list ap;
10634 const_tree arg;
10635 size_t i;
10637 va_start (ap, call);
10638 i = 0;
10642 code = (enum tree_code) va_arg (ap, int);
10643 switch (code)
10645 case 0:
10646 /* This signifies an ellipses, any further arguments are all ok. */
10647 res = true;
10648 goto end;
10649 case VOID_TYPE:
10650 /* This signifies an endlink, if no arguments remain, return
10651 true, otherwise return false. */
10652 res = (i == gimple_call_num_args (call));
10653 goto end;
10654 default:
10655 /* If no parameters remain or the parameter's code does not
10656 match the specified code, return false. Otherwise continue
10657 checking any remaining arguments. */
10658 arg = gimple_call_arg (call, i++);
10659 if (!validate_arg (arg, code))
10660 goto end;
10661 break;
10664 while (1);
10666 /* We need gotos here since we can only have one VA_CLOSE in a
10667 function. */
10668 end: ;
10669 va_end (ap);
10671 return res;
10674 /* This function validates the types of a function call argument list
10675 against a specified list of tree_codes. If the last specifier is a 0,
10676 that represents an ellipses, otherwise the last specifier must be a
10677 VOID_TYPE. */
10679 bool
10680 validate_arglist (const_tree callexpr, ...)
10682 enum tree_code code;
10683 bool res = 0;
10684 va_list ap;
10685 const_call_expr_arg_iterator iter;
10686 const_tree arg;
10688 va_start (ap, callexpr);
10689 init_const_call_expr_arg_iterator (callexpr, &iter);
10693 code = (enum tree_code) va_arg (ap, int);
10694 switch (code)
10696 case 0:
10697 /* This signifies an ellipses, any further arguments are all ok. */
10698 res = true;
10699 goto end;
10700 case VOID_TYPE:
10701 /* This signifies an endlink, if no arguments remain, return
10702 true, otherwise return false. */
10703 res = !more_const_call_expr_args_p (&iter);
10704 goto end;
10705 default:
10706 /* If no parameters remain or the parameter's code does not
10707 match the specified code, return false. Otherwise continue
10708 checking any remaining arguments. */
10709 arg = next_const_call_expr_arg (&iter);
10710 if (!validate_arg (arg, code))
10711 goto end;
10712 break;
10715 while (1);
10717 /* We need gotos here since we can only have one VA_CLOSE in a
10718 function. */
10719 end: ;
10720 va_end (ap);
10722 return res;
10725 /* Default target-specific builtin expander that does nothing. */
10728 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10729 rtx target ATTRIBUTE_UNUSED,
10730 rtx subtarget ATTRIBUTE_UNUSED,
10731 enum machine_mode mode ATTRIBUTE_UNUSED,
10732 int ignore ATTRIBUTE_UNUSED)
10734 return NULL_RTX;
10737 /* Returns true is EXP represents data that would potentially reside
10738 in a readonly section. */
10740 static bool
10741 readonly_data_expr (tree exp)
10743 STRIP_NOPS (exp);
10745 if (TREE_CODE (exp) != ADDR_EXPR)
10746 return false;
10748 exp = get_base_address (TREE_OPERAND (exp, 0));
10749 if (!exp)
10750 return false;
10752 /* Make sure we call decl_readonly_section only for trees it
10753 can handle (since it returns true for everything it doesn't
10754 understand). */
10755 if (TREE_CODE (exp) == STRING_CST
10756 || TREE_CODE (exp) == CONSTRUCTOR
10757 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10758 return decl_readonly_section (exp, 0);
10759 else
10760 return false;
10763 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10764 to the call, and TYPE is its return type.
10766 Return NULL_TREE if no simplification was possible, otherwise return the
10767 simplified form of the call as a tree.
10769 The simplified form may be a constant or other expression which
10770 computes the same value, but in a more efficient manner (including
10771 calls to other builtin functions).
10773 The call may contain arguments which need to be evaluated, but
10774 which are not useful to determine the result of the call. In
10775 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10776 COMPOUND_EXPR will be an argument which must be evaluated.
10777 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10778 COMPOUND_EXPR in the chain will contain the tree for the simplified
10779 form of the builtin function call. */
10781 static tree
10782 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10784 if (!validate_arg (s1, POINTER_TYPE)
10785 || !validate_arg (s2, POINTER_TYPE))
10786 return NULL_TREE;
10787 else
10789 tree fn;
10790 const char *p1, *p2;
10792 p2 = c_getstr (s2);
10793 if (p2 == NULL)
10794 return NULL_TREE;
10796 p1 = c_getstr (s1);
10797 if (p1 != NULL)
10799 const char *r = strstr (p1, p2);
10800 tree tem;
10802 if (r == NULL)
10803 return build_int_cst (TREE_TYPE (s1), 0);
10805 /* Return an offset into the constant string argument. */
10806 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10807 return fold_convert_loc (loc, type, tem);
10810 /* The argument is const char *, and the result is char *, so we need
10811 a type conversion here to avoid a warning. */
10812 if (p2[0] == '\0')
10813 return fold_convert_loc (loc, type, s1);
10815 if (p2[1] != '\0')
10816 return NULL_TREE;
10818 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10819 if (!fn)
10820 return NULL_TREE;
10822 /* New argument list transforming strstr(s1, s2) to
10823 strchr(s1, s2[0]). */
10824 return build_call_expr_loc (loc, fn, 2, s1,
10825 build_int_cst (integer_type_node, p2[0]));
10829 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10830 the call, and TYPE is its return type.
10832 Return NULL_TREE if no simplification was possible, otherwise return the
10833 simplified form of the call as a tree.
10835 The simplified form may be a constant or other expression which
10836 computes the same value, but in a more efficient manner (including
10837 calls to other builtin functions).
10839 The call may contain arguments which need to be evaluated, but
10840 which are not useful to determine the result of the call. In
10841 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10842 COMPOUND_EXPR will be an argument which must be evaluated.
10843 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10844 COMPOUND_EXPR in the chain will contain the tree for the simplified
10845 form of the builtin function call. */
10847 static tree
10848 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10850 if (!validate_arg (s1, POINTER_TYPE)
10851 || !validate_arg (s2, INTEGER_TYPE))
10852 return NULL_TREE;
10853 else
10855 const char *p1;
10857 if (TREE_CODE (s2) != INTEGER_CST)
10858 return NULL_TREE;
10860 p1 = c_getstr (s1);
10861 if (p1 != NULL)
10863 char c;
10864 const char *r;
10865 tree tem;
10867 if (target_char_cast (s2, &c))
10868 return NULL_TREE;
10870 r = strchr (p1, c);
10872 if (r == NULL)
10873 return build_int_cst (TREE_TYPE (s1), 0);
10875 /* Return an offset into the constant string argument. */
10876 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10877 return fold_convert_loc (loc, type, tem);
10879 return NULL_TREE;
10883 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10884 the call, and TYPE is its return type.
10886 Return NULL_TREE if no simplification was possible, otherwise return the
10887 simplified form of the call as a tree.
10889 The simplified form may be a constant or other expression which
10890 computes the same value, but in a more efficient manner (including
10891 calls to other builtin functions).
10893 The call may contain arguments which need to be evaluated, but
10894 which are not useful to determine the result of the call. In
10895 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10896 COMPOUND_EXPR will be an argument which must be evaluated.
10897 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10898 COMPOUND_EXPR in the chain will contain the tree for the simplified
10899 form of the builtin function call. */
10901 static tree
10902 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10904 if (!validate_arg (s1, POINTER_TYPE)
10905 || !validate_arg (s2, INTEGER_TYPE))
10906 return NULL_TREE;
10907 else
10909 tree fn;
10910 const char *p1;
10912 if (TREE_CODE (s2) != INTEGER_CST)
10913 return NULL_TREE;
10915 p1 = c_getstr (s1);
10916 if (p1 != NULL)
10918 char c;
10919 const char *r;
10920 tree tem;
10922 if (target_char_cast (s2, &c))
10923 return NULL_TREE;
10925 r = strrchr (p1, c);
10927 if (r == NULL)
10928 return build_int_cst (TREE_TYPE (s1), 0);
10930 /* Return an offset into the constant string argument. */
10931 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10932 return fold_convert_loc (loc, type, tem);
10935 if (! integer_zerop (s2))
10936 return NULL_TREE;
10938 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10939 if (!fn)
10940 return NULL_TREE;
10942 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10943 return build_call_expr_loc (loc, fn, 2, s1, s2);
10947 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10948 to the call, and TYPE is its return type.
10950 Return NULL_TREE if no simplification was possible, otherwise return the
10951 simplified form of the call as a tree.
10953 The simplified form may be a constant or other expression which
10954 computes the same value, but in a more efficient manner (including
10955 calls to other builtin functions).
10957 The call may contain arguments which need to be evaluated, but
10958 which are not useful to determine the result of the call. In
10959 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10960 COMPOUND_EXPR will be an argument which must be evaluated.
10961 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10962 COMPOUND_EXPR in the chain will contain the tree for the simplified
10963 form of the builtin function call. */
10965 static tree
10966 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10968 if (!validate_arg (s1, POINTER_TYPE)
10969 || !validate_arg (s2, POINTER_TYPE))
10970 return NULL_TREE;
10971 else
10973 tree fn;
10974 const char *p1, *p2;
10976 p2 = c_getstr (s2);
10977 if (p2 == NULL)
10978 return NULL_TREE;
10980 p1 = c_getstr (s1);
10981 if (p1 != NULL)
10983 const char *r = strpbrk (p1, p2);
10984 tree tem;
10986 if (r == NULL)
10987 return build_int_cst (TREE_TYPE (s1), 0);
10989 /* Return an offset into the constant string argument. */
10990 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10991 return fold_convert_loc (loc, type, tem);
10994 if (p2[0] == '\0')
10995 /* strpbrk(x, "") == NULL.
10996 Evaluate and ignore s1 in case it had side-effects. */
10997 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10999 if (p2[1] != '\0')
11000 return NULL_TREE; /* Really call strpbrk. */
11002 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11003 if (!fn)
11004 return NULL_TREE;
11006 /* New argument list transforming strpbrk(s1, s2) to
11007 strchr(s1, s2[0]). */
11008 return build_call_expr_loc (loc, fn, 2, s1,
11009 build_int_cst (integer_type_node, p2[0]));
11013 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11014 to the call.
11016 Return NULL_TREE if no simplification was possible, otherwise return the
11017 simplified form of the call as a tree.
11019 The simplified form may be a constant or other expression which
11020 computes the same value, but in a more efficient manner (including
11021 calls to other builtin functions).
11023 The call may contain arguments which need to be evaluated, but
11024 which are not useful to determine the result of the call. In
11025 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11026 COMPOUND_EXPR will be an argument which must be evaluated.
11027 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11028 COMPOUND_EXPR in the chain will contain the tree for the simplified
11029 form of the builtin function call. */
11031 static tree
11032 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11034 if (!validate_arg (dst, POINTER_TYPE)
11035 || !validate_arg (src, POINTER_TYPE))
11036 return NULL_TREE;
11037 else
11039 const char *p = c_getstr (src);
11041 /* If the string length is zero, return the dst parameter. */
11042 if (p && *p == '\0')
11043 return dst;
11045 if (optimize_insn_for_speed_p ())
11047 /* See if we can store by pieces into (dst + strlen(dst)). */
11048 tree newdst, call;
11049 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11050 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11052 if (!strlen_fn || !strcpy_fn)
11053 return NULL_TREE;
11055 /* If we don't have a movstr we don't want to emit an strcpy
11056 call. We have to do that if the length of the source string
11057 isn't computable (in that case we can use memcpy probably
11058 later expanding to a sequence of mov instructions). If we
11059 have movstr instructions we can emit strcpy calls. */
11060 if (!HAVE_movstr)
11062 tree len = c_strlen (src, 1);
11063 if (! len || TREE_SIDE_EFFECTS (len))
11064 return NULL_TREE;
11067 /* Stabilize the argument list. */
11068 dst = builtin_save_expr (dst);
11070 /* Create strlen (dst). */
11071 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11072 /* Create (dst p+ strlen (dst)). */
11074 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11075 newdst = builtin_save_expr (newdst);
11077 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11078 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11080 return NULL_TREE;
11084 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11085 arguments to the call.
11087 Return NULL_TREE if no simplification was possible, otherwise return the
11088 simplified form of the call as a tree.
11090 The simplified form may be a constant or other expression which
11091 computes the same value, but in a more efficient manner (including
11092 calls to other builtin functions).
11094 The call may contain arguments which need to be evaluated, but
11095 which are not useful to determine the result of the call. In
11096 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11097 COMPOUND_EXPR will be an argument which must be evaluated.
11098 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11099 COMPOUND_EXPR in the chain will contain the tree for the simplified
11100 form of the builtin function call. */
11102 static tree
11103 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11105 if (!validate_arg (dst, POINTER_TYPE)
11106 || !validate_arg (src, POINTER_TYPE)
11107 || !validate_arg (len, INTEGER_TYPE))
11108 return NULL_TREE;
11109 else
11111 const char *p = c_getstr (src);
11113 /* If the requested length is zero, or the src parameter string
11114 length is zero, return the dst parameter. */
11115 if (integer_zerop (len) || (p && *p == '\0'))
11116 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11118 /* If the requested len is greater than or equal to the string
11119 length, call strcat. */
11120 if (TREE_CODE (len) == INTEGER_CST && p
11121 && compare_tree_int (len, strlen (p)) >= 0)
11123 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11125 /* If the replacement _DECL isn't initialized, don't do the
11126 transformation. */
11127 if (!fn)
11128 return NULL_TREE;
11130 return build_call_expr_loc (loc, fn, 2, dst, src);
11132 return NULL_TREE;
11136 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11137 to the call.
11139 Return NULL_TREE if no simplification was possible, otherwise return the
11140 simplified form of the call as a tree.
11142 The simplified form may be a constant or other expression which
11143 computes the same value, but in a more efficient manner (including
11144 calls to other builtin functions).
11146 The call may contain arguments which need to be evaluated, but
11147 which are not useful to determine the result of the call. In
11148 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11149 COMPOUND_EXPR will be an argument which must be evaluated.
11150 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11151 COMPOUND_EXPR in the chain will contain the tree for the simplified
11152 form of the builtin function call. */
11154 static tree
11155 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11157 if (!validate_arg (s1, POINTER_TYPE)
11158 || !validate_arg (s2, POINTER_TYPE))
11159 return NULL_TREE;
11160 else
11162 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11164 /* If both arguments are constants, evaluate at compile-time. */
11165 if (p1 && p2)
11167 const size_t r = strspn (p1, p2);
11168 return size_int (r);
11171 /* If either argument is "", return NULL_TREE. */
11172 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11173 /* Evaluate and ignore both arguments in case either one has
11174 side-effects. */
11175 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11176 s1, s2);
11177 return NULL_TREE;
11181 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11182 to the call.
11184 Return NULL_TREE if no simplification was possible, otherwise return the
11185 simplified form of the call as a tree.
11187 The simplified form may be a constant or other expression which
11188 computes the same value, but in a more efficient manner (including
11189 calls to other builtin functions).
11191 The call may contain arguments which need to be evaluated, but
11192 which are not useful to determine the result of the call. In
11193 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11194 COMPOUND_EXPR will be an argument which must be evaluated.
11195 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11196 COMPOUND_EXPR in the chain will contain the tree for the simplified
11197 form of the builtin function call. */
11199 static tree
11200 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11202 if (!validate_arg (s1, POINTER_TYPE)
11203 || !validate_arg (s2, POINTER_TYPE))
11204 return NULL_TREE;
11205 else
11207 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11209 /* If both arguments are constants, evaluate at compile-time. */
11210 if (p1 && p2)
11212 const size_t r = strcspn (p1, p2);
11213 return size_int (r);
11216 /* If the first argument is "", return NULL_TREE. */
11217 if (p1 && *p1 == '\0')
11219 /* Evaluate and ignore argument s2 in case it has
11220 side-effects. */
11221 return omit_one_operand_loc (loc, size_type_node,
11222 size_zero_node, s2);
11225 /* If the second argument is "", return __builtin_strlen(s1). */
11226 if (p2 && *p2 == '\0')
11228 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11230 /* If the replacement _DECL isn't initialized, don't do the
11231 transformation. */
11232 if (!fn)
11233 return NULL_TREE;
11235 return build_call_expr_loc (loc, fn, 1, s1);
11237 return NULL_TREE;
11241 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11242 to the call. IGNORE is true if the value returned
11243 by the builtin will be ignored. UNLOCKED is true is true if this
11244 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11245 the known length of the string. Return NULL_TREE if no simplification
11246 was possible. */
11248 tree
11249 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11250 bool ignore, bool unlocked, tree len)
11252 /* If we're using an unlocked function, assume the other unlocked
11253 functions exist explicitly. */
11254 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11255 : implicit_built_in_decls[BUILT_IN_FPUTC];
11256 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11257 : implicit_built_in_decls[BUILT_IN_FWRITE];
11259 /* If the return value is used, don't do the transformation. */
11260 if (!ignore)
11261 return NULL_TREE;
11263 /* Verify the arguments in the original call. */
11264 if (!validate_arg (arg0, POINTER_TYPE)
11265 || !validate_arg (arg1, POINTER_TYPE))
11266 return NULL_TREE;
11268 if (! len)
11269 len = c_strlen (arg0, 0);
11271 /* Get the length of the string passed to fputs. If the length
11272 can't be determined, punt. */
11273 if (!len
11274 || TREE_CODE (len) != INTEGER_CST)
11275 return NULL_TREE;
11277 switch (compare_tree_int (len, 1))
11279 case -1: /* length is 0, delete the call entirely . */
11280 return omit_one_operand_loc (loc, integer_type_node,
11281 integer_zero_node, arg1);;
11283 case 0: /* length is 1, call fputc. */
11285 const char *p = c_getstr (arg0);
11287 if (p != NULL)
11289 if (fn_fputc)
11290 return build_call_expr_loc (loc, fn_fputc, 2,
11291 build_int_cst
11292 (integer_type_node, p[0]), arg1);
11293 else
11294 return NULL_TREE;
11297 /* FALLTHROUGH */
11298 case 1: /* length is greater than 1, call fwrite. */
11300 /* If optimizing for size keep fputs. */
11301 if (optimize_function_for_size_p (cfun))
11302 return NULL_TREE;
11303 /* New argument list transforming fputs(string, stream) to
11304 fwrite(string, 1, len, stream). */
11305 if (fn_fwrite)
11306 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11307 size_one_node, len, arg1);
11308 else
11309 return NULL_TREE;
11311 default:
11312 gcc_unreachable ();
11314 return NULL_TREE;
11317 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11318 produced. False otherwise. This is done so that we don't output the error
11319 or warning twice or three times. */
11321 bool
11322 fold_builtin_next_arg (tree exp, bool va_start_p)
11324 tree fntype = TREE_TYPE (current_function_decl);
11325 int nargs = call_expr_nargs (exp);
11326 tree arg;
11328 if (!stdarg_p (fntype))
11330 error ("%<va_start%> used in function with fixed args");
11331 return true;
11334 if (va_start_p)
11336 if (va_start_p && (nargs != 2))
11338 error ("wrong number of arguments to function %<va_start%>");
11339 return true;
11341 arg = CALL_EXPR_ARG (exp, 1);
11343 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11344 when we checked the arguments and if needed issued a warning. */
11345 else
11347 if (nargs == 0)
11349 /* Evidently an out of date version of <stdarg.h>; can't validate
11350 va_start's second argument, but can still work as intended. */
11351 warning (0, "%<__builtin_next_arg%> called without an argument");
11352 return true;
11354 else if (nargs > 1)
11356 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11357 return true;
11359 arg = CALL_EXPR_ARG (exp, 0);
11362 if (TREE_CODE (arg) == SSA_NAME)
11363 arg = SSA_NAME_VAR (arg);
11365 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11366 or __builtin_next_arg (0) the first time we see it, after checking
11367 the arguments and if needed issuing a warning. */
11368 if (!integer_zerop (arg))
11370 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11372 /* Strip off all nops for the sake of the comparison. This
11373 is not quite the same as STRIP_NOPS. It does more.
11374 We must also strip off INDIRECT_EXPR for C++ reference
11375 parameters. */
11376 while (CONVERT_EXPR_P (arg)
11377 || TREE_CODE (arg) == INDIRECT_REF)
11378 arg = TREE_OPERAND (arg, 0);
11379 if (arg != last_parm)
11381 /* FIXME: Sometimes with the tree optimizers we can get the
11382 not the last argument even though the user used the last
11383 argument. We just warn and set the arg to be the last
11384 argument so that we will get wrong-code because of
11385 it. */
11386 warning (0, "second parameter of %<va_start%> not last named argument");
11389 /* Undefined by C99 7.15.1.4p4 (va_start):
11390 "If the parameter parmN is declared with the register storage
11391 class, with a function or array type, or with a type that is
11392 not compatible with the type that results after application of
11393 the default argument promotions, the behavior is undefined."
11395 else if (DECL_REGISTER (arg))
11396 warning (0, "undefined behaviour when second parameter of "
11397 "%<va_start%> is declared with %<register%> storage");
11399 /* We want to verify the second parameter just once before the tree
11400 optimizers are run and then avoid keeping it in the tree,
11401 as otherwise we could warn even for correct code like:
11402 void foo (int i, ...)
11403 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11404 if (va_start_p)
11405 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11406 else
11407 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11409 return false;
11413 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11414 ORIG may be null if this is a 2-argument call. We don't attempt to
11415 simplify calls with more than 3 arguments.
11417 Return NULL_TREE if no simplification was possible, otherwise return the
11418 simplified form of the call as a tree. If IGNORED is true, it means that
11419 the caller does not use the returned value of the function. */
11421 static tree
11422 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11423 tree orig, int ignored)
11425 tree call, retval;
11426 const char *fmt_str = NULL;
11428 /* Verify the required arguments in the original call. We deal with two
11429 types of sprintf() calls: 'sprintf (str, fmt)' and
11430 'sprintf (dest, "%s", orig)'. */
11431 if (!validate_arg (dest, POINTER_TYPE)
11432 || !validate_arg (fmt, POINTER_TYPE))
11433 return NULL_TREE;
11434 if (orig && !validate_arg (orig, POINTER_TYPE))
11435 return NULL_TREE;
11437 /* Check whether the format is a literal string constant. */
11438 fmt_str = c_getstr (fmt);
11439 if (fmt_str == NULL)
11440 return NULL_TREE;
11442 call = NULL_TREE;
11443 retval = NULL_TREE;
11445 if (!init_target_chars ())
11446 return NULL_TREE;
11448 /* If the format doesn't contain % args or %%, use strcpy. */
11449 if (strchr (fmt_str, target_percent) == NULL)
11451 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11453 if (!fn)
11454 return NULL_TREE;
11456 /* Don't optimize sprintf (buf, "abc", ptr++). */
11457 if (orig)
11458 return NULL_TREE;
11460 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11461 'format' is known to contain no % formats. */
11462 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11463 if (!ignored)
11464 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11467 /* If the format is "%s", use strcpy if the result isn't used. */
11468 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11470 tree fn;
11471 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11473 if (!fn)
11474 return NULL_TREE;
11476 /* Don't crash on sprintf (str1, "%s"). */
11477 if (!orig)
11478 return NULL_TREE;
11480 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11481 if (!ignored)
11483 retval = c_strlen (orig, 1);
11484 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11485 return NULL_TREE;
11487 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11490 if (call && retval)
11492 retval = fold_convert_loc
11493 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11494 retval);
11495 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11497 else
11498 return call;
11501 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11502 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11503 attempt to simplify calls with more than 4 arguments.
11505 Return NULL_TREE if no simplification was possible, otherwise return the
11506 simplified form of the call as a tree. If IGNORED is true, it means that
11507 the caller does not use the returned value of the function. */
11509 static tree
11510 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11511 tree orig, int ignored)
11513 tree call, retval;
11514 const char *fmt_str = NULL;
11515 unsigned HOST_WIDE_INT destlen;
11517 /* Verify the required arguments in the original call. We deal with two
11518 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11519 'snprintf (dest, cst, "%s", orig)'. */
11520 if (!validate_arg (dest, POINTER_TYPE)
11521 || !validate_arg (destsize, INTEGER_TYPE)
11522 || !validate_arg (fmt, POINTER_TYPE))
11523 return NULL_TREE;
11524 if (orig && !validate_arg (orig, POINTER_TYPE))
11525 return NULL_TREE;
11527 if (!host_integerp (destsize, 1))
11528 return NULL_TREE;
11530 /* Check whether the format is a literal string constant. */
11531 fmt_str = c_getstr (fmt);
11532 if (fmt_str == NULL)
11533 return NULL_TREE;
11535 call = NULL_TREE;
11536 retval = NULL_TREE;
11538 if (!init_target_chars ())
11539 return NULL_TREE;
11541 destlen = tree_low_cst (destsize, 1);
11543 /* If the format doesn't contain % args or %%, use strcpy. */
11544 if (strchr (fmt_str, target_percent) == NULL)
11546 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11547 size_t len = strlen (fmt_str);
11549 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11550 if (orig)
11551 return NULL_TREE;
11553 /* We could expand this as
11554 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11555 or to
11556 memcpy (str, fmt_with_nul_at_cstm1, cst);
11557 but in the former case that might increase code size
11558 and in the latter case grow .rodata section too much.
11559 So punt for now. */
11560 if (len >= destlen)
11561 return NULL_TREE;
11563 if (!fn)
11564 return NULL_TREE;
11566 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11567 'format' is known to contain no % formats and
11568 strlen (fmt) < cst. */
11569 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11571 if (!ignored)
11572 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11575 /* If the format is "%s", use strcpy if the result isn't used. */
11576 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11578 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11579 unsigned HOST_WIDE_INT origlen;
11581 /* Don't crash on snprintf (str1, cst, "%s"). */
11582 if (!orig)
11583 return NULL_TREE;
11585 retval = c_strlen (orig, 1);
11586 if (!retval || !host_integerp (retval, 1))
11587 return NULL_TREE;
11589 origlen = tree_low_cst (retval, 1);
11590 /* We could expand this as
11591 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11592 or to
11593 memcpy (str1, str2_with_nul_at_cstm1, cst);
11594 but in the former case that might increase code size
11595 and in the latter case grow .rodata section too much.
11596 So punt for now. */
11597 if (origlen >= destlen)
11598 return NULL_TREE;
11600 /* Convert snprintf (str1, cst, "%s", str2) into
11601 strcpy (str1, str2) if strlen (str2) < cst. */
11602 if (!fn)
11603 return NULL_TREE;
11605 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11607 if (ignored)
11608 retval = NULL_TREE;
11611 if (call && retval)
11613 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
11614 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11615 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11617 else
11618 return call;
11621 /* Expand a call EXP to __builtin_object_size. */
11624 expand_builtin_object_size (tree exp)
11626 tree ost;
11627 int object_size_type;
11628 tree fndecl = get_callee_fndecl (exp);
11630 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11632 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11633 exp, fndecl);
11634 expand_builtin_trap ();
11635 return const0_rtx;
11638 ost = CALL_EXPR_ARG (exp, 1);
11639 STRIP_NOPS (ost);
11641 if (TREE_CODE (ost) != INTEGER_CST
11642 || tree_int_cst_sgn (ost) < 0
11643 || compare_tree_int (ost, 3) > 0)
11645 error ("%Klast argument of %D is not integer constant between 0 and 3",
11646 exp, fndecl);
11647 expand_builtin_trap ();
11648 return const0_rtx;
11651 object_size_type = tree_low_cst (ost, 0);
11653 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11656 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11657 FCODE is the BUILT_IN_* to use.
11658 Return NULL_RTX if we failed; the caller should emit a normal call,
11659 otherwise try to get the result in TARGET, if convenient (and in
11660 mode MODE if that's convenient). */
11662 static rtx
11663 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11664 enum built_in_function fcode)
11666 tree dest, src, len, size;
11668 if (!validate_arglist (exp,
11669 POINTER_TYPE,
11670 fcode == BUILT_IN_MEMSET_CHK
11671 ? INTEGER_TYPE : POINTER_TYPE,
11672 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11673 return NULL_RTX;
11675 dest = CALL_EXPR_ARG (exp, 0);
11676 src = CALL_EXPR_ARG (exp, 1);
11677 len = CALL_EXPR_ARG (exp, 2);
11678 size = CALL_EXPR_ARG (exp, 3);
11680 if (! host_integerp (size, 1))
11681 return NULL_RTX;
11683 if (host_integerp (len, 1) || integer_all_onesp (size))
11685 tree fn;
11687 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11689 warning_at (tree_nonartificial_location (exp),
11690 0, "%Kcall to %D will always overflow destination buffer",
11691 exp, get_callee_fndecl (exp));
11692 return NULL_RTX;
11695 fn = NULL_TREE;
11696 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11697 mem{cpy,pcpy,move,set} is available. */
11698 switch (fcode)
11700 case BUILT_IN_MEMCPY_CHK:
11701 fn = built_in_decls[BUILT_IN_MEMCPY];
11702 break;
11703 case BUILT_IN_MEMPCPY_CHK:
11704 fn = built_in_decls[BUILT_IN_MEMPCPY];
11705 break;
11706 case BUILT_IN_MEMMOVE_CHK:
11707 fn = built_in_decls[BUILT_IN_MEMMOVE];
11708 break;
11709 case BUILT_IN_MEMSET_CHK:
11710 fn = built_in_decls[BUILT_IN_MEMSET];
11711 break;
11712 default:
11713 break;
11716 if (! fn)
11717 return NULL_RTX;
11719 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11720 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11721 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11722 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11724 else if (fcode == BUILT_IN_MEMSET_CHK)
11725 return NULL_RTX;
11726 else
11728 unsigned int dest_align = get_pointer_alignment (dest);
11730 /* If DEST is not a pointer type, call the normal function. */
11731 if (dest_align == 0)
11732 return NULL_RTX;
11734 /* If SRC and DEST are the same (and not volatile), do nothing. */
11735 if (operand_equal_p (src, dest, 0))
11737 tree expr;
11739 if (fcode != BUILT_IN_MEMPCPY_CHK)
11741 /* Evaluate and ignore LEN in case it has side-effects. */
11742 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11743 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11746 expr = fold_build_pointer_plus (dest, len);
11747 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11750 /* __memmove_chk special case. */
11751 if (fcode == BUILT_IN_MEMMOVE_CHK)
11753 unsigned int src_align = get_pointer_alignment (src);
11755 if (src_align == 0)
11756 return NULL_RTX;
11758 /* If src is categorized for a readonly section we can use
11759 normal __memcpy_chk. */
11760 if (readonly_data_expr (src))
11762 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11763 if (!fn)
11764 return NULL_RTX;
11765 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11766 dest, src, len, size);
11767 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11768 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11769 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11772 return NULL_RTX;
11776 /* Emit warning if a buffer overflow is detected at compile time. */
11778 static void
11779 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11781 int is_strlen = 0;
11782 tree len, size;
11783 location_t loc = tree_nonartificial_location (exp);
11785 switch (fcode)
11787 case BUILT_IN_STRCPY_CHK:
11788 case BUILT_IN_STPCPY_CHK:
11789 /* For __strcat_chk the warning will be emitted only if overflowing
11790 by at least strlen (dest) + 1 bytes. */
11791 case BUILT_IN_STRCAT_CHK:
11792 len = CALL_EXPR_ARG (exp, 1);
11793 size = CALL_EXPR_ARG (exp, 2);
11794 is_strlen = 1;
11795 break;
11796 case BUILT_IN_STRNCAT_CHK:
11797 case BUILT_IN_STRNCPY_CHK:
11798 len = CALL_EXPR_ARG (exp, 2);
11799 size = CALL_EXPR_ARG (exp, 3);
11800 break;
11801 case BUILT_IN_SNPRINTF_CHK:
11802 case BUILT_IN_VSNPRINTF_CHK:
11803 len = CALL_EXPR_ARG (exp, 1);
11804 size = CALL_EXPR_ARG (exp, 3);
11805 break;
11806 default:
11807 gcc_unreachable ();
11810 if (!len || !size)
11811 return;
11813 if (! host_integerp (size, 1) || integer_all_onesp (size))
11814 return;
11816 if (is_strlen)
11818 len = c_strlen (len, 1);
11819 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11820 return;
11822 else if (fcode == BUILT_IN_STRNCAT_CHK)
11824 tree src = CALL_EXPR_ARG (exp, 1);
11825 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11826 return;
11827 src = c_strlen (src, 1);
11828 if (! src || ! host_integerp (src, 1))
11830 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11831 exp, get_callee_fndecl (exp));
11832 return;
11834 else if (tree_int_cst_lt (src, size))
11835 return;
11837 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11838 return;
11840 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11841 exp, get_callee_fndecl (exp));
11844 /* Emit warning if a buffer overflow is detected at compile time
11845 in __sprintf_chk/__vsprintf_chk calls. */
11847 static void
11848 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11850 tree size, len, fmt;
11851 const char *fmt_str;
11852 int nargs = call_expr_nargs (exp);
11854 /* Verify the required arguments in the original call. */
11856 if (nargs < 4)
11857 return;
11858 size = CALL_EXPR_ARG (exp, 2);
11859 fmt = CALL_EXPR_ARG (exp, 3);
11861 if (! host_integerp (size, 1) || integer_all_onesp (size))
11862 return;
11864 /* Check whether the format is a literal string constant. */
11865 fmt_str = c_getstr (fmt);
11866 if (fmt_str == NULL)
11867 return;
11869 if (!init_target_chars ())
11870 return;
11872 /* If the format doesn't contain % args or %%, we know its size. */
11873 if (strchr (fmt_str, target_percent) == 0)
11874 len = build_int_cstu (size_type_node, strlen (fmt_str));
11875 /* If the format is "%s" and first ... argument is a string literal,
11876 we know it too. */
11877 else if (fcode == BUILT_IN_SPRINTF_CHK
11878 && strcmp (fmt_str, target_percent_s) == 0)
11880 tree arg;
11882 if (nargs < 5)
11883 return;
11884 arg = CALL_EXPR_ARG (exp, 4);
11885 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11886 return;
11888 len = c_strlen (arg, 1);
11889 if (!len || ! host_integerp (len, 1))
11890 return;
11892 else
11893 return;
11895 if (! tree_int_cst_lt (len, size))
11896 warning_at (tree_nonartificial_location (exp),
11897 0, "%Kcall to %D will always overflow destination buffer",
11898 exp, get_callee_fndecl (exp));
11901 /* Emit warning if a free is called with address of a variable. */
11903 static void
11904 maybe_emit_free_warning (tree exp)
11906 tree arg = CALL_EXPR_ARG (exp, 0);
11908 STRIP_NOPS (arg);
11909 if (TREE_CODE (arg) != ADDR_EXPR)
11910 return;
11912 arg = get_base_address (TREE_OPERAND (arg, 0));
11913 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11914 return;
11916 if (SSA_VAR_P (arg))
11917 warning_at (tree_nonartificial_location (exp),
11918 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11919 else
11920 warning_at (tree_nonartificial_location (exp),
11921 0, "%Kattempt to free a non-heap object", exp);
11924 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11925 if possible. */
11927 tree
11928 fold_builtin_object_size (tree ptr, tree ost)
11930 unsigned HOST_WIDE_INT bytes;
11931 int object_size_type;
11933 if (!validate_arg (ptr, POINTER_TYPE)
11934 || !validate_arg (ost, INTEGER_TYPE))
11935 return NULL_TREE;
11937 STRIP_NOPS (ost);
11939 if (TREE_CODE (ost) != INTEGER_CST
11940 || tree_int_cst_sgn (ost) < 0
11941 || compare_tree_int (ost, 3) > 0)
11942 return NULL_TREE;
11944 object_size_type = tree_low_cst (ost, 0);
11946 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11947 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11948 and (size_t) 0 for types 2 and 3. */
11949 if (TREE_SIDE_EFFECTS (ptr))
11950 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11952 if (TREE_CODE (ptr) == ADDR_EXPR)
11954 bytes = compute_builtin_object_size (ptr, object_size_type);
11955 if (double_int_fits_to_tree_p (size_type_node,
11956 uhwi_to_double_int (bytes)))
11957 return build_int_cstu (size_type_node, bytes);
11959 else if (TREE_CODE (ptr) == SSA_NAME)
11961 /* If object size is not known yet, delay folding until
11962 later. Maybe subsequent passes will help determining
11963 it. */
11964 bytes = compute_builtin_object_size (ptr, object_size_type);
11965 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11966 && double_int_fits_to_tree_p (size_type_node,
11967 uhwi_to_double_int (bytes)))
11968 return build_int_cstu (size_type_node, bytes);
11971 return NULL_TREE;
11974 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11975 DEST, SRC, LEN, and SIZE are the arguments to the call.
11976 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11977 code of the builtin. If MAXLEN is not NULL, it is maximum length
11978 passed as third argument. */
11980 tree
11981 fold_builtin_memory_chk (location_t loc, tree fndecl,
11982 tree dest, tree src, tree len, tree size,
11983 tree maxlen, bool ignore,
11984 enum built_in_function fcode)
11986 tree fn;
11988 if (!validate_arg (dest, POINTER_TYPE)
11989 || !validate_arg (src,
11990 (fcode == BUILT_IN_MEMSET_CHK
11991 ? INTEGER_TYPE : POINTER_TYPE))
11992 || !validate_arg (len, INTEGER_TYPE)
11993 || !validate_arg (size, INTEGER_TYPE))
11994 return NULL_TREE;
11996 /* If SRC and DEST are the same (and not volatile), return DEST
11997 (resp. DEST+LEN for __mempcpy_chk). */
11998 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12000 if (fcode != BUILT_IN_MEMPCPY_CHK)
12001 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12002 dest, len);
12003 else
12005 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12006 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12010 if (! host_integerp (size, 1))
12011 return NULL_TREE;
12013 if (! integer_all_onesp (size))
12015 if (! host_integerp (len, 1))
12017 /* If LEN is not constant, try MAXLEN too.
12018 For MAXLEN only allow optimizing into non-_ocs function
12019 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12020 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12022 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12024 /* (void) __mempcpy_chk () can be optimized into
12025 (void) __memcpy_chk (). */
12026 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12027 if (!fn)
12028 return NULL_TREE;
12030 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12032 return NULL_TREE;
12035 else
12036 maxlen = len;
12038 if (tree_int_cst_lt (size, maxlen))
12039 return NULL_TREE;
12042 fn = NULL_TREE;
12043 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12044 mem{cpy,pcpy,move,set} is available. */
12045 switch (fcode)
12047 case BUILT_IN_MEMCPY_CHK:
12048 fn = built_in_decls[BUILT_IN_MEMCPY];
12049 break;
12050 case BUILT_IN_MEMPCPY_CHK:
12051 fn = built_in_decls[BUILT_IN_MEMPCPY];
12052 break;
12053 case BUILT_IN_MEMMOVE_CHK:
12054 fn = built_in_decls[BUILT_IN_MEMMOVE];
12055 break;
12056 case BUILT_IN_MEMSET_CHK:
12057 fn = built_in_decls[BUILT_IN_MEMSET];
12058 break;
12059 default:
12060 break;
12063 if (!fn)
12064 return NULL_TREE;
12066 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12069 /* Fold a call to the __st[rp]cpy_chk builtin.
12070 DEST, SRC, and SIZE are the arguments to the call.
12071 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12072 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12073 strings passed as second argument. */
12075 tree
12076 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12077 tree src, tree size,
12078 tree maxlen, bool ignore,
12079 enum built_in_function fcode)
12081 tree len, fn;
12083 if (!validate_arg (dest, POINTER_TYPE)
12084 || !validate_arg (src, POINTER_TYPE)
12085 || !validate_arg (size, INTEGER_TYPE))
12086 return NULL_TREE;
12088 /* If SRC and DEST are the same (and not volatile), return DEST. */
12089 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12090 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12092 if (! host_integerp (size, 1))
12093 return NULL_TREE;
12095 if (! integer_all_onesp (size))
12097 len = c_strlen (src, 1);
12098 if (! len || ! host_integerp (len, 1))
12100 /* If LEN is not constant, try MAXLEN too.
12101 For MAXLEN only allow optimizing into non-_ocs function
12102 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12103 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12105 if (fcode == BUILT_IN_STPCPY_CHK)
12107 if (! ignore)
12108 return NULL_TREE;
12110 /* If return value of __stpcpy_chk is ignored,
12111 optimize into __strcpy_chk. */
12112 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12113 if (!fn)
12114 return NULL_TREE;
12116 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12119 if (! len || TREE_SIDE_EFFECTS (len))
12120 return NULL_TREE;
12122 /* If c_strlen returned something, but not a constant,
12123 transform __strcpy_chk into __memcpy_chk. */
12124 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12125 if (!fn)
12126 return NULL_TREE;
12128 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12129 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12130 build_call_expr_loc (loc, fn, 4,
12131 dest, src, len, size));
12134 else
12135 maxlen = len;
12137 if (! tree_int_cst_lt (maxlen, size))
12138 return NULL_TREE;
12141 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12142 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12143 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12144 if (!fn)
12145 return NULL_TREE;
12147 return build_call_expr_loc (loc, fn, 2, dest, src);
12150 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12151 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12152 length passed as third argument. */
12154 tree
12155 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12156 tree len, tree size, tree maxlen)
12158 tree fn;
12160 if (!validate_arg (dest, POINTER_TYPE)
12161 || !validate_arg (src, POINTER_TYPE)
12162 || !validate_arg (len, INTEGER_TYPE)
12163 || !validate_arg (size, INTEGER_TYPE))
12164 return NULL_TREE;
12166 if (! host_integerp (size, 1))
12167 return NULL_TREE;
12169 if (! integer_all_onesp (size))
12171 if (! host_integerp (len, 1))
12173 /* If LEN is not constant, try MAXLEN too.
12174 For MAXLEN only allow optimizing into non-_ocs function
12175 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12176 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12177 return NULL_TREE;
12179 else
12180 maxlen = len;
12182 if (tree_int_cst_lt (size, maxlen))
12183 return NULL_TREE;
12186 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12187 fn = built_in_decls[BUILT_IN_STRNCPY];
12188 if (!fn)
12189 return NULL_TREE;
12191 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12194 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12195 are the arguments to the call. */
12197 static tree
12198 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12199 tree src, tree size)
12201 tree fn;
12202 const char *p;
12204 if (!validate_arg (dest, POINTER_TYPE)
12205 || !validate_arg (src, POINTER_TYPE)
12206 || !validate_arg (size, INTEGER_TYPE))
12207 return NULL_TREE;
12209 p = c_getstr (src);
12210 /* If the SRC parameter is "", return DEST. */
12211 if (p && *p == '\0')
12212 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12214 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12215 return NULL_TREE;
12217 /* If __builtin_strcat_chk is used, assume strcat is available. */
12218 fn = built_in_decls[BUILT_IN_STRCAT];
12219 if (!fn)
12220 return NULL_TREE;
12222 return build_call_expr_loc (loc, fn, 2, dest, src);
12225 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12226 LEN, and SIZE. */
12228 static tree
12229 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12230 tree dest, tree src, tree len, tree size)
12232 tree fn;
12233 const char *p;
12235 if (!validate_arg (dest, POINTER_TYPE)
12236 || !validate_arg (src, POINTER_TYPE)
12237 || !validate_arg (size, INTEGER_TYPE)
12238 || !validate_arg (size, INTEGER_TYPE))
12239 return NULL_TREE;
12241 p = c_getstr (src);
12242 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12243 if (p && *p == '\0')
12244 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12245 else if (integer_zerop (len))
12246 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12248 if (! host_integerp (size, 1))
12249 return NULL_TREE;
12251 if (! integer_all_onesp (size))
12253 tree src_len = c_strlen (src, 1);
12254 if (src_len
12255 && host_integerp (src_len, 1)
12256 && host_integerp (len, 1)
12257 && ! tree_int_cst_lt (len, src_len))
12259 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12260 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12261 if (!fn)
12262 return NULL_TREE;
12264 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12266 return NULL_TREE;
12269 /* If __builtin_strncat_chk is used, assume strncat is available. */
12270 fn = built_in_decls[BUILT_IN_STRNCAT];
12271 if (!fn)
12272 return NULL_TREE;
12274 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12277 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12278 Return NULL_TREE if a normal call should be emitted rather than
12279 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12280 or BUILT_IN_VSPRINTF_CHK. */
12282 static tree
12283 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12284 enum built_in_function fcode)
12286 tree dest, size, len, fn, fmt, flag;
12287 const char *fmt_str;
12289 /* Verify the required arguments in the original call. */
12290 if (nargs < 4)
12291 return NULL_TREE;
12292 dest = args[0];
12293 if (!validate_arg (dest, POINTER_TYPE))
12294 return NULL_TREE;
12295 flag = args[1];
12296 if (!validate_arg (flag, INTEGER_TYPE))
12297 return NULL_TREE;
12298 size = args[2];
12299 if (!validate_arg (size, INTEGER_TYPE))
12300 return NULL_TREE;
12301 fmt = args[3];
12302 if (!validate_arg (fmt, POINTER_TYPE))
12303 return NULL_TREE;
12305 if (! host_integerp (size, 1))
12306 return NULL_TREE;
12308 len = NULL_TREE;
12310 if (!init_target_chars ())
12311 return NULL_TREE;
12313 /* Check whether the format is a literal string constant. */
12314 fmt_str = c_getstr (fmt);
12315 if (fmt_str != NULL)
12317 /* If the format doesn't contain % args or %%, we know the size. */
12318 if (strchr (fmt_str, target_percent) == 0)
12320 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12321 len = build_int_cstu (size_type_node, strlen (fmt_str));
12323 /* If the format is "%s" and first ... argument is a string literal,
12324 we know the size too. */
12325 else if (fcode == BUILT_IN_SPRINTF_CHK
12326 && strcmp (fmt_str, target_percent_s) == 0)
12328 tree arg;
12330 if (nargs == 5)
12332 arg = args[4];
12333 if (validate_arg (arg, POINTER_TYPE))
12335 len = c_strlen (arg, 1);
12336 if (! len || ! host_integerp (len, 1))
12337 len = NULL_TREE;
12343 if (! integer_all_onesp (size))
12345 if (! len || ! tree_int_cst_lt (len, size))
12346 return NULL_TREE;
12349 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12350 or if format doesn't contain % chars or is "%s". */
12351 if (! integer_zerop (flag))
12353 if (fmt_str == NULL)
12354 return NULL_TREE;
12355 if (strchr (fmt_str, target_percent) != NULL
12356 && strcmp (fmt_str, target_percent_s))
12357 return NULL_TREE;
12360 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12361 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12362 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12363 if (!fn)
12364 return NULL_TREE;
12366 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12369 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12370 a normal call should be emitted rather than expanding the function
12371 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12373 static tree
12374 fold_builtin_sprintf_chk (location_t loc, tree exp,
12375 enum built_in_function fcode)
12377 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12378 CALL_EXPR_ARGP (exp), fcode);
12381 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12382 NULL_TREE if a normal call should be emitted rather than expanding
12383 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12384 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12385 passed as second argument. */
12387 static tree
12388 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12389 tree maxlen, enum built_in_function fcode)
12391 tree dest, size, len, fn, fmt, flag;
12392 const char *fmt_str;
12394 /* Verify the required arguments in the original call. */
12395 if (nargs < 5)
12396 return NULL_TREE;
12397 dest = args[0];
12398 if (!validate_arg (dest, POINTER_TYPE))
12399 return NULL_TREE;
12400 len = args[1];
12401 if (!validate_arg (len, INTEGER_TYPE))
12402 return NULL_TREE;
12403 flag = args[2];
12404 if (!validate_arg (flag, INTEGER_TYPE))
12405 return NULL_TREE;
12406 size = args[3];
12407 if (!validate_arg (size, INTEGER_TYPE))
12408 return NULL_TREE;
12409 fmt = args[4];
12410 if (!validate_arg (fmt, POINTER_TYPE))
12411 return NULL_TREE;
12413 if (! host_integerp (size, 1))
12414 return NULL_TREE;
12416 if (! integer_all_onesp (size))
12418 if (! host_integerp (len, 1))
12420 /* If LEN is not constant, try MAXLEN too.
12421 For MAXLEN only allow optimizing into non-_ocs function
12422 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12423 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12424 return NULL_TREE;
12426 else
12427 maxlen = len;
12429 if (tree_int_cst_lt (size, maxlen))
12430 return NULL_TREE;
12433 if (!init_target_chars ())
12434 return NULL_TREE;
12436 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12437 or if format doesn't contain % chars or is "%s". */
12438 if (! integer_zerop (flag))
12440 fmt_str = c_getstr (fmt);
12441 if (fmt_str == NULL)
12442 return NULL_TREE;
12443 if (strchr (fmt_str, target_percent) != NULL
12444 && strcmp (fmt_str, target_percent_s))
12445 return NULL_TREE;
12448 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12449 available. */
12450 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12451 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12452 if (!fn)
12453 return NULL_TREE;
12455 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12458 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12459 a normal call should be emitted rather than expanding the function
12460 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12461 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12462 passed as second argument. */
12464 tree
12465 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12466 enum built_in_function fcode)
12468 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12469 CALL_EXPR_ARGP (exp), maxlen, fcode);
12472 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12473 FMT and ARG are the arguments to the call; we don't fold cases with
12474 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12476 Return NULL_TREE if no simplification was possible, otherwise return the
12477 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12478 code of the function to be simplified. */
12480 static tree
12481 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12482 tree arg, bool ignore,
12483 enum built_in_function fcode)
12485 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12486 const char *fmt_str = NULL;
12488 /* If the return value is used, don't do the transformation. */
12489 if (! ignore)
12490 return NULL_TREE;
12492 /* Verify the required arguments in the original call. */
12493 if (!validate_arg (fmt, POINTER_TYPE))
12494 return NULL_TREE;
12496 /* Check whether the format is a literal string constant. */
12497 fmt_str = c_getstr (fmt);
12498 if (fmt_str == NULL)
12499 return NULL_TREE;
12501 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12503 /* If we're using an unlocked function, assume the other
12504 unlocked functions exist explicitly. */
12505 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12506 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12508 else
12510 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12511 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12514 if (!init_target_chars ())
12515 return NULL_TREE;
12517 if (strcmp (fmt_str, target_percent_s) == 0
12518 || strchr (fmt_str, target_percent) == NULL)
12520 const char *str;
12522 if (strcmp (fmt_str, target_percent_s) == 0)
12524 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12525 return NULL_TREE;
12527 if (!arg || !validate_arg (arg, POINTER_TYPE))
12528 return NULL_TREE;
12530 str = c_getstr (arg);
12531 if (str == NULL)
12532 return NULL_TREE;
12534 else
12536 /* The format specifier doesn't contain any '%' characters. */
12537 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12538 && arg)
12539 return NULL_TREE;
12540 str = fmt_str;
12543 /* If the string was "", printf does nothing. */
12544 if (str[0] == '\0')
12545 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12547 /* If the string has length of 1, call putchar. */
12548 if (str[1] == '\0')
12550 /* Given printf("c"), (where c is any one character,)
12551 convert "c"[0] to an int and pass that to the replacement
12552 function. */
12553 newarg = build_int_cst (integer_type_node, str[0]);
12554 if (fn_putchar)
12555 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12557 else
12559 /* If the string was "string\n", call puts("string"). */
12560 size_t len = strlen (str);
12561 if ((unsigned char)str[len - 1] == target_newline
12562 && (size_t) (int) len == len
12563 && (int) len > 0)
12565 char *newstr;
12566 tree offset_node, string_cst;
12568 /* Create a NUL-terminated string that's one char shorter
12569 than the original, stripping off the trailing '\n'. */
12570 newarg = build_string_literal (len, str);
12571 string_cst = string_constant (newarg, &offset_node);
12572 gcc_checking_assert (string_cst
12573 && (TREE_STRING_LENGTH (string_cst)
12574 == (int) len)
12575 && integer_zerop (offset_node)
12576 && (unsigned char)
12577 TREE_STRING_POINTER (string_cst)[len - 1]
12578 == target_newline);
12579 /* build_string_literal creates a new STRING_CST,
12580 modify it in place to avoid double copying. */
12581 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12582 newstr[len - 1] = '\0';
12583 if (fn_puts)
12584 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12586 else
12587 /* We'd like to arrange to call fputs(string,stdout) here,
12588 but we need stdout and don't have a way to get it yet. */
12589 return NULL_TREE;
12593 /* The other optimizations can be done only on the non-va_list variants. */
12594 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12595 return NULL_TREE;
12597 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12598 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12600 if (!arg || !validate_arg (arg, POINTER_TYPE))
12601 return NULL_TREE;
12602 if (fn_puts)
12603 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12606 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12607 else if (strcmp (fmt_str, target_percent_c) == 0)
12609 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12610 return NULL_TREE;
12611 if (fn_putchar)
12612 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12615 if (!call)
12616 return NULL_TREE;
12618 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12621 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12622 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12623 more than 3 arguments, and ARG may be null in the 2-argument case.
12625 Return NULL_TREE if no simplification was possible, otherwise return the
12626 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12627 code of the function to be simplified. */
12629 static tree
12630 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12631 tree fmt, tree arg, bool ignore,
12632 enum built_in_function fcode)
12634 tree fn_fputc, fn_fputs, call = NULL_TREE;
12635 const char *fmt_str = NULL;
12637 /* If the return value is used, don't do the transformation. */
12638 if (! ignore)
12639 return NULL_TREE;
12641 /* Verify the required arguments in the original call. */
12642 if (!validate_arg (fp, POINTER_TYPE))
12643 return NULL_TREE;
12644 if (!validate_arg (fmt, POINTER_TYPE))
12645 return NULL_TREE;
12647 /* Check whether the format is a literal string constant. */
12648 fmt_str = c_getstr (fmt);
12649 if (fmt_str == NULL)
12650 return NULL_TREE;
12652 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12654 /* If we're using an unlocked function, assume the other
12655 unlocked functions exist explicitly. */
12656 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12657 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12659 else
12661 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12662 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12665 if (!init_target_chars ())
12666 return NULL_TREE;
12668 /* If the format doesn't contain % args or %%, use strcpy. */
12669 if (strchr (fmt_str, target_percent) == NULL)
12671 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12672 && arg)
12673 return NULL_TREE;
12675 /* If the format specifier was "", fprintf does nothing. */
12676 if (fmt_str[0] == '\0')
12678 /* If FP has side-effects, just wait until gimplification is
12679 done. */
12680 if (TREE_SIDE_EFFECTS (fp))
12681 return NULL_TREE;
12683 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12686 /* When "string" doesn't contain %, replace all cases of
12687 fprintf (fp, string) with fputs (string, fp). The fputs
12688 builtin will take care of special cases like length == 1. */
12689 if (fn_fputs)
12690 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12693 /* The other optimizations can be done only on the non-va_list variants. */
12694 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12695 return NULL_TREE;
12697 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12698 else if (strcmp (fmt_str, target_percent_s) == 0)
12700 if (!arg || !validate_arg (arg, POINTER_TYPE))
12701 return NULL_TREE;
12702 if (fn_fputs)
12703 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12706 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12707 else if (strcmp (fmt_str, target_percent_c) == 0)
12709 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12710 return NULL_TREE;
12711 if (fn_fputc)
12712 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12715 if (!call)
12716 return NULL_TREE;
12717 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12720 /* Initialize format string characters in the target charset. */
12722 static bool
12723 init_target_chars (void)
12725 static bool init;
12726 if (!init)
12728 target_newline = lang_hooks.to_target_charset ('\n');
12729 target_percent = lang_hooks.to_target_charset ('%');
12730 target_c = lang_hooks.to_target_charset ('c');
12731 target_s = lang_hooks.to_target_charset ('s');
12732 if (target_newline == 0 || target_percent == 0 || target_c == 0
12733 || target_s == 0)
12734 return false;
12736 target_percent_c[0] = target_percent;
12737 target_percent_c[1] = target_c;
12738 target_percent_c[2] = '\0';
12740 target_percent_s[0] = target_percent;
12741 target_percent_s[1] = target_s;
12742 target_percent_s[2] = '\0';
12744 target_percent_s_newline[0] = target_percent;
12745 target_percent_s_newline[1] = target_s;
12746 target_percent_s_newline[2] = target_newline;
12747 target_percent_s_newline[3] = '\0';
12749 init = true;
12751 return true;
12754 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12755 and no overflow/underflow occurred. INEXACT is true if M was not
12756 exactly calculated. TYPE is the tree type for the result. This
12757 function assumes that you cleared the MPFR flags and then
12758 calculated M to see if anything subsequently set a flag prior to
12759 entering this function. Return NULL_TREE if any checks fail. */
12761 static tree
12762 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12764 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12765 overflow/underflow occurred. If -frounding-math, proceed iff the
12766 result of calling FUNC was exact. */
12767 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12768 && (!flag_rounding_math || !inexact))
12770 REAL_VALUE_TYPE rr;
12772 real_from_mpfr (&rr, m, type, GMP_RNDN);
12773 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12774 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12775 but the mpft_t is not, then we underflowed in the
12776 conversion. */
12777 if (real_isfinite (&rr)
12778 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12780 REAL_VALUE_TYPE rmode;
12782 real_convert (&rmode, TYPE_MODE (type), &rr);
12783 /* Proceed iff the specified mode can hold the value. */
12784 if (real_identical (&rmode, &rr))
12785 return build_real (type, rmode);
12788 return NULL_TREE;
12791 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12792 number and no overflow/underflow occurred. INEXACT is true if M
12793 was not exactly calculated. TYPE is the tree type for the result.
12794 This function assumes that you cleared the MPFR flags and then
12795 calculated M to see if anything subsequently set a flag prior to
12796 entering this function. Return NULL_TREE if any checks fail, if
12797 FORCE_CONVERT is true, then bypass the checks. */
12799 static tree
12800 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12802 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12803 overflow/underflow occurred. If -frounding-math, proceed iff the
12804 result of calling FUNC was exact. */
12805 if (force_convert
12806 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12807 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12808 && (!flag_rounding_math || !inexact)))
12810 REAL_VALUE_TYPE re, im;
12812 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12813 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12814 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12815 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12816 but the mpft_t is not, then we underflowed in the
12817 conversion. */
12818 if (force_convert
12819 || (real_isfinite (&re) && real_isfinite (&im)
12820 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12821 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12823 REAL_VALUE_TYPE re_mode, im_mode;
12825 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12826 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12827 /* Proceed iff the specified mode can hold the value. */
12828 if (force_convert
12829 || (real_identical (&re_mode, &re)
12830 && real_identical (&im_mode, &im)))
12831 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12832 build_real (TREE_TYPE (type), im_mode));
12835 return NULL_TREE;
12838 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12839 FUNC on it and return the resulting value as a tree with type TYPE.
12840 If MIN and/or MAX are not NULL, then the supplied ARG must be
12841 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12842 acceptable values, otherwise they are not. The mpfr precision is
12843 set to the precision of TYPE. We assume that function FUNC returns
12844 zero if the result could be calculated exactly within the requested
12845 precision. */
12847 static tree
12848 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12849 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12850 bool inclusive)
12852 tree result = NULL_TREE;
12854 STRIP_NOPS (arg);
12856 /* To proceed, MPFR must exactly represent the target floating point
12857 format, which only happens when the target base equals two. */
12858 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12859 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12861 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12863 if (real_isfinite (ra)
12864 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12865 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12867 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12868 const int prec = fmt->p;
12869 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12870 int inexact;
12871 mpfr_t m;
12873 mpfr_init2 (m, prec);
12874 mpfr_from_real (m, ra, GMP_RNDN);
12875 mpfr_clear_flags ();
12876 inexact = func (m, m, rnd);
12877 result = do_mpfr_ckconv (m, type, inexact);
12878 mpfr_clear (m);
12882 return result;
12885 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12886 FUNC on it and return the resulting value as a tree with type TYPE.
12887 The mpfr precision is set to the precision of TYPE. We assume that
12888 function FUNC returns zero if the result could be calculated
12889 exactly within the requested precision. */
12891 static tree
12892 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12893 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12895 tree result = NULL_TREE;
12897 STRIP_NOPS (arg1);
12898 STRIP_NOPS (arg2);
12900 /* To proceed, MPFR must exactly represent the target floating point
12901 format, which only happens when the target base equals two. */
12902 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12903 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12904 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12906 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12907 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12909 if (real_isfinite (ra1) && real_isfinite (ra2))
12911 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12912 const int prec = fmt->p;
12913 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12914 int inexact;
12915 mpfr_t m1, m2;
12917 mpfr_inits2 (prec, m1, m2, NULL);
12918 mpfr_from_real (m1, ra1, GMP_RNDN);
12919 mpfr_from_real (m2, ra2, GMP_RNDN);
12920 mpfr_clear_flags ();
12921 inexact = func (m1, m1, m2, rnd);
12922 result = do_mpfr_ckconv (m1, type, inexact);
12923 mpfr_clears (m1, m2, NULL);
12927 return result;
12930 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12931 FUNC on it and return the resulting value as a tree with type TYPE.
12932 The mpfr precision is set to the precision of TYPE. We assume that
12933 function FUNC returns zero if the result could be calculated
12934 exactly within the requested precision. */
12936 static tree
12937 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12938 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12940 tree result = NULL_TREE;
12942 STRIP_NOPS (arg1);
12943 STRIP_NOPS (arg2);
12944 STRIP_NOPS (arg3);
12946 /* To proceed, MPFR must exactly represent the target floating point
12947 format, which only happens when the target base equals two. */
12948 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12949 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12950 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12951 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12953 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12954 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12955 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12957 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12959 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12960 const int prec = fmt->p;
12961 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12962 int inexact;
12963 mpfr_t m1, m2, m3;
12965 mpfr_inits2 (prec, m1, m2, m3, NULL);
12966 mpfr_from_real (m1, ra1, GMP_RNDN);
12967 mpfr_from_real (m2, ra2, GMP_RNDN);
12968 mpfr_from_real (m3, ra3, GMP_RNDN);
12969 mpfr_clear_flags ();
12970 inexact = func (m1, m1, m2, m3, rnd);
12971 result = do_mpfr_ckconv (m1, type, inexact);
12972 mpfr_clears (m1, m2, m3, NULL);
12976 return result;
12979 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12980 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12981 If ARG_SINP and ARG_COSP are NULL then the result is returned
12982 as a complex value.
12983 The type is taken from the type of ARG and is used for setting the
12984 precision of the calculation and results. */
12986 static tree
12987 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12989 tree const type = TREE_TYPE (arg);
12990 tree result = NULL_TREE;
12992 STRIP_NOPS (arg);
12994 /* To proceed, MPFR must exactly represent the target floating point
12995 format, which only happens when the target base equals two. */
12996 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12997 && TREE_CODE (arg) == REAL_CST
12998 && !TREE_OVERFLOW (arg))
13000 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13002 if (real_isfinite (ra))
13004 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13005 const int prec = fmt->p;
13006 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13007 tree result_s, result_c;
13008 int inexact;
13009 mpfr_t m, ms, mc;
13011 mpfr_inits2 (prec, m, ms, mc, NULL);
13012 mpfr_from_real (m, ra, GMP_RNDN);
13013 mpfr_clear_flags ();
13014 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13015 result_s = do_mpfr_ckconv (ms, type, inexact);
13016 result_c = do_mpfr_ckconv (mc, type, inexact);
13017 mpfr_clears (m, ms, mc, NULL);
13018 if (result_s && result_c)
13020 /* If we are to return in a complex value do so. */
13021 if (!arg_sinp && !arg_cosp)
13022 return build_complex (build_complex_type (type),
13023 result_c, result_s);
13025 /* Dereference the sin/cos pointer arguments. */
13026 arg_sinp = build_fold_indirect_ref (arg_sinp);
13027 arg_cosp = build_fold_indirect_ref (arg_cosp);
13028 /* Proceed if valid pointer type were passed in. */
13029 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13030 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13032 /* Set the values. */
13033 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13034 result_s);
13035 TREE_SIDE_EFFECTS (result_s) = 1;
13036 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13037 result_c);
13038 TREE_SIDE_EFFECTS (result_c) = 1;
13039 /* Combine the assignments into a compound expr. */
13040 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13041 result_s, result_c));
13046 return result;
13049 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13050 two-argument mpfr order N Bessel function FUNC on them and return
13051 the resulting value as a tree with type TYPE. The mpfr precision
13052 is set to the precision of TYPE. We assume that function FUNC
13053 returns zero if the result could be calculated exactly within the
13054 requested precision. */
13055 static tree
13056 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13057 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13058 const REAL_VALUE_TYPE *min, bool inclusive)
13060 tree result = NULL_TREE;
13062 STRIP_NOPS (arg1);
13063 STRIP_NOPS (arg2);
13065 /* To proceed, MPFR must exactly represent the target floating point
13066 format, which only happens when the target base equals two. */
13067 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13068 && host_integerp (arg1, 0)
13069 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13071 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13072 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13074 if (n == (long)n
13075 && real_isfinite (ra)
13076 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13078 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13079 const int prec = fmt->p;
13080 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13081 int inexact;
13082 mpfr_t m;
13084 mpfr_init2 (m, prec);
13085 mpfr_from_real (m, ra, GMP_RNDN);
13086 mpfr_clear_flags ();
13087 inexact = func (m, n, m, rnd);
13088 result = do_mpfr_ckconv (m, type, inexact);
13089 mpfr_clear (m);
13093 return result;
13096 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13097 the pointer *(ARG_QUO) and return the result. The type is taken
13098 from the type of ARG0 and is used for setting the precision of the
13099 calculation and results. */
13101 static tree
13102 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13104 tree const type = TREE_TYPE (arg0);
13105 tree result = NULL_TREE;
13107 STRIP_NOPS (arg0);
13108 STRIP_NOPS (arg1);
13110 /* To proceed, MPFR must exactly represent the target floating point
13111 format, which only happens when the target base equals two. */
13112 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13113 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13114 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13116 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13117 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13119 if (real_isfinite (ra0) && real_isfinite (ra1))
13121 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13122 const int prec = fmt->p;
13123 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13124 tree result_rem;
13125 long integer_quo;
13126 mpfr_t m0, m1;
13128 mpfr_inits2 (prec, m0, m1, NULL);
13129 mpfr_from_real (m0, ra0, GMP_RNDN);
13130 mpfr_from_real (m1, ra1, GMP_RNDN);
13131 mpfr_clear_flags ();
13132 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13133 /* Remquo is independent of the rounding mode, so pass
13134 inexact=0 to do_mpfr_ckconv(). */
13135 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13136 mpfr_clears (m0, m1, NULL);
13137 if (result_rem)
13139 /* MPFR calculates quo in the host's long so it may
13140 return more bits in quo than the target int can hold
13141 if sizeof(host long) > sizeof(target int). This can
13142 happen even for native compilers in LP64 mode. In
13143 these cases, modulo the quo value with the largest
13144 number that the target int can hold while leaving one
13145 bit for the sign. */
13146 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13147 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13149 /* Dereference the quo pointer argument. */
13150 arg_quo = build_fold_indirect_ref (arg_quo);
13151 /* Proceed iff a valid pointer type was passed in. */
13152 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13154 /* Set the value. */
13155 tree result_quo
13156 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13157 build_int_cst (TREE_TYPE (arg_quo),
13158 integer_quo));
13159 TREE_SIDE_EFFECTS (result_quo) = 1;
13160 /* Combine the quo assignment with the rem. */
13161 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13162 result_quo, result_rem));
13167 return result;
13170 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13171 resulting value as a tree with type TYPE. The mpfr precision is
13172 set to the precision of TYPE. We assume that this mpfr function
13173 returns zero if the result could be calculated exactly within the
13174 requested precision. In addition, the integer pointer represented
13175 by ARG_SG will be dereferenced and set to the appropriate signgam
13176 (-1,1) value. */
13178 static tree
13179 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13181 tree result = NULL_TREE;
13183 STRIP_NOPS (arg);
13185 /* To proceed, MPFR must exactly represent the target floating point
13186 format, which only happens when the target base equals two. Also
13187 verify ARG is a constant and that ARG_SG is an int pointer. */
13188 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13189 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13190 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13191 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13193 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13195 /* In addition to NaN and Inf, the argument cannot be zero or a
13196 negative integer. */
13197 if (real_isfinite (ra)
13198 && ra->cl != rvc_zero
13199 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13201 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13202 const int prec = fmt->p;
13203 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13204 int inexact, sg;
13205 mpfr_t m;
13206 tree result_lg;
13208 mpfr_init2 (m, prec);
13209 mpfr_from_real (m, ra, GMP_RNDN);
13210 mpfr_clear_flags ();
13211 inexact = mpfr_lgamma (m, &sg, m, rnd);
13212 result_lg = do_mpfr_ckconv (m, type, inexact);
13213 mpfr_clear (m);
13214 if (result_lg)
13216 tree result_sg;
13218 /* Dereference the arg_sg pointer argument. */
13219 arg_sg = build_fold_indirect_ref (arg_sg);
13220 /* Assign the signgam value into *arg_sg. */
13221 result_sg = fold_build2 (MODIFY_EXPR,
13222 TREE_TYPE (arg_sg), arg_sg,
13223 build_int_cst (TREE_TYPE (arg_sg), sg));
13224 TREE_SIDE_EFFECTS (result_sg) = 1;
13225 /* Combine the signgam assignment with the lgamma result. */
13226 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13227 result_sg, result_lg));
13232 return result;
13235 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13236 function FUNC on it and return the resulting value as a tree with
13237 type TYPE. The mpfr precision is set to the precision of TYPE. We
13238 assume that function FUNC returns zero if the result could be
13239 calculated exactly within the requested precision. */
13241 static tree
13242 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13244 tree result = NULL_TREE;
13246 STRIP_NOPS (arg);
13248 /* To proceed, MPFR must exactly represent the target floating point
13249 format, which only happens when the target base equals two. */
13250 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13251 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13252 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13254 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13255 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13257 if (real_isfinite (re) && real_isfinite (im))
13259 const struct real_format *const fmt =
13260 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13261 const int prec = fmt->p;
13262 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13263 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13264 int inexact;
13265 mpc_t m;
13267 mpc_init2 (m, prec);
13268 mpfr_from_real (mpc_realref(m), re, rnd);
13269 mpfr_from_real (mpc_imagref(m), im, rnd);
13270 mpfr_clear_flags ();
13271 inexact = func (m, m, crnd);
13272 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13273 mpc_clear (m);
13277 return result;
13280 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13281 mpc function FUNC on it and return the resulting value as a tree
13282 with type TYPE. The mpfr precision is set to the precision of
13283 TYPE. We assume that function FUNC returns zero if the result
13284 could be calculated exactly within the requested precision. If
13285 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13286 in the arguments and/or results. */
13288 tree
13289 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13290 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13292 tree result = NULL_TREE;
13294 STRIP_NOPS (arg0);
13295 STRIP_NOPS (arg1);
13297 /* To proceed, MPFR must exactly represent the target floating point
13298 format, which only happens when the target base equals two. */
13299 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13300 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13301 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13302 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13303 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13305 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13306 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13307 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13308 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13310 if (do_nonfinite
13311 || (real_isfinite (re0) && real_isfinite (im0)
13312 && real_isfinite (re1) && real_isfinite (im1)))
13314 const struct real_format *const fmt =
13315 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13316 const int prec = fmt->p;
13317 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13318 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13319 int inexact;
13320 mpc_t m0, m1;
13322 mpc_init2 (m0, prec);
13323 mpc_init2 (m1, prec);
13324 mpfr_from_real (mpc_realref(m0), re0, rnd);
13325 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13326 mpfr_from_real (mpc_realref(m1), re1, rnd);
13327 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13328 mpfr_clear_flags ();
13329 inexact = func (m0, m0, m1, crnd);
13330 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13331 mpc_clear (m0);
13332 mpc_clear (m1);
13336 return result;
13339 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13340 a normal call should be emitted rather than expanding the function
13341 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13343 static tree
13344 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13346 int nargs = gimple_call_num_args (stmt);
13348 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13349 (nargs > 0
13350 ? gimple_call_arg_ptr (stmt, 0)
13351 : &error_mark_node), fcode);
13354 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13355 a normal call should be emitted rather than expanding the function
13356 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13357 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13358 passed as second argument. */
13360 tree
13361 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13362 enum built_in_function fcode)
13364 int nargs = gimple_call_num_args (stmt);
13366 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13367 (nargs > 0
13368 ? gimple_call_arg_ptr (stmt, 0)
13369 : &error_mark_node), maxlen, fcode);
13372 /* Builtins with folding operations that operate on "..." arguments
13373 need special handling; we need to store the arguments in a convenient
13374 data structure before attempting any folding. Fortunately there are
13375 only a few builtins that fall into this category. FNDECL is the
13376 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13377 result of the function call is ignored. */
13379 static tree
13380 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13381 bool ignore ATTRIBUTE_UNUSED)
13383 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13384 tree ret = NULL_TREE;
13386 switch (fcode)
13388 case BUILT_IN_SPRINTF_CHK:
13389 case BUILT_IN_VSPRINTF_CHK:
13390 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13391 break;
13393 case BUILT_IN_SNPRINTF_CHK:
13394 case BUILT_IN_VSNPRINTF_CHK:
13395 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13397 default:
13398 break;
13400 if (ret)
13402 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13403 TREE_NO_WARNING (ret) = 1;
13404 return ret;
13406 return NULL_TREE;
13409 /* A wrapper function for builtin folding that prevents warnings for
13410 "statement without effect" and the like, caused by removing the
13411 call node earlier than the warning is generated. */
13413 tree
13414 fold_call_stmt (gimple stmt, bool ignore)
13416 tree ret = NULL_TREE;
13417 tree fndecl = gimple_call_fndecl (stmt);
13418 location_t loc = gimple_location (stmt);
13419 if (fndecl
13420 && TREE_CODE (fndecl) == FUNCTION_DECL
13421 && DECL_BUILT_IN (fndecl)
13422 && !gimple_call_va_arg_pack_p (stmt))
13424 int nargs = gimple_call_num_args (stmt);
13425 tree *args = (nargs > 0
13426 ? gimple_call_arg_ptr (stmt, 0)
13427 : &error_mark_node);
13429 if (avoid_folding_inline_builtin (fndecl))
13430 return NULL_TREE;
13431 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13433 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13435 else
13437 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13438 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13439 if (!ret)
13440 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13441 if (ret)
13443 /* Propagate location information from original call to
13444 expansion of builtin. Otherwise things like
13445 maybe_emit_chk_warning, that operate on the expansion
13446 of a builtin, will use the wrong location information. */
13447 if (gimple_has_location (stmt))
13449 tree realret = ret;
13450 if (TREE_CODE (ret) == NOP_EXPR)
13451 realret = TREE_OPERAND (ret, 0);
13452 if (CAN_HAVE_LOCATION_P (realret)
13453 && !EXPR_HAS_LOCATION (realret))
13454 SET_EXPR_LOCATION (realret, loc);
13455 return realret;
13457 return ret;
13461 return NULL_TREE;
13464 /* Look up the function in built_in_decls that corresponds to DECL
13465 and set ASMSPEC as its user assembler name. DECL must be a
13466 function decl that declares a builtin. */
13468 void
13469 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13471 tree builtin;
13472 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13473 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13474 && asmspec != 0);
13476 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13477 set_user_assembler_name (builtin, asmspec);
13478 switch (DECL_FUNCTION_CODE (decl))
13480 case BUILT_IN_MEMCPY:
13481 init_block_move_fn (asmspec);
13482 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13483 break;
13484 case BUILT_IN_MEMSET:
13485 init_block_clear_fn (asmspec);
13486 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13487 break;
13488 case BUILT_IN_MEMMOVE:
13489 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13490 break;
13491 case BUILT_IN_MEMCMP:
13492 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13493 break;
13494 case BUILT_IN_ABORT:
13495 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13496 break;
13497 case BUILT_IN_FFS:
13498 if (INT_TYPE_SIZE < BITS_PER_WORD)
13500 set_user_assembler_libfunc ("ffs", asmspec);
13501 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13502 MODE_INT, 0), "ffs");
13504 break;
13505 default:
13506 break;
13510 /* Return true if DECL is a builtin that expands to a constant or similarly
13511 simple code. */
13512 bool
13513 is_simple_builtin (tree decl)
13515 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13516 switch (DECL_FUNCTION_CODE (decl))
13518 /* Builtins that expand to constants. */
13519 case BUILT_IN_CONSTANT_P:
13520 case BUILT_IN_EXPECT:
13521 case BUILT_IN_OBJECT_SIZE:
13522 case BUILT_IN_UNREACHABLE:
13523 /* Simple register moves or loads from stack. */
13524 case BUILT_IN_ASSUME_ALIGNED:
13525 case BUILT_IN_RETURN_ADDRESS:
13526 case BUILT_IN_EXTRACT_RETURN_ADDR:
13527 case BUILT_IN_FROB_RETURN_ADDR:
13528 case BUILT_IN_RETURN:
13529 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13530 case BUILT_IN_FRAME_ADDRESS:
13531 case BUILT_IN_VA_END:
13532 case BUILT_IN_STACK_SAVE:
13533 case BUILT_IN_STACK_RESTORE:
13534 /* Exception state returns or moves registers around. */
13535 case BUILT_IN_EH_FILTER:
13536 case BUILT_IN_EH_POINTER:
13537 case BUILT_IN_EH_COPY_VALUES:
13538 return true;
13540 default:
13541 return false;
13544 return false;
13547 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13548 most probably expanded inline into reasonably simple code. This is a
13549 superset of is_simple_builtin. */
13550 bool
13551 is_inexpensive_builtin (tree decl)
13553 if (!decl)
13554 return false;
13555 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13556 return true;
13557 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13558 switch (DECL_FUNCTION_CODE (decl))
13560 case BUILT_IN_ABS:
13561 case BUILT_IN_ALLOCA:
13562 case BUILT_IN_BSWAP32:
13563 case BUILT_IN_BSWAP64:
13564 case BUILT_IN_CLZ:
13565 case BUILT_IN_CLZIMAX:
13566 case BUILT_IN_CLZL:
13567 case BUILT_IN_CLZLL:
13568 case BUILT_IN_CTZ:
13569 case BUILT_IN_CTZIMAX:
13570 case BUILT_IN_CTZL:
13571 case BUILT_IN_CTZLL:
13572 case BUILT_IN_FFS:
13573 case BUILT_IN_FFSIMAX:
13574 case BUILT_IN_FFSL:
13575 case BUILT_IN_FFSLL:
13576 case BUILT_IN_IMAXABS:
13577 case BUILT_IN_FINITE:
13578 case BUILT_IN_FINITEF:
13579 case BUILT_IN_FINITEL:
13580 case BUILT_IN_FINITED32:
13581 case BUILT_IN_FINITED64:
13582 case BUILT_IN_FINITED128:
13583 case BUILT_IN_FPCLASSIFY:
13584 case BUILT_IN_ISFINITE:
13585 case BUILT_IN_ISINF_SIGN:
13586 case BUILT_IN_ISINF:
13587 case BUILT_IN_ISINFF:
13588 case BUILT_IN_ISINFL:
13589 case BUILT_IN_ISINFD32:
13590 case BUILT_IN_ISINFD64:
13591 case BUILT_IN_ISINFD128:
13592 case BUILT_IN_ISNAN:
13593 case BUILT_IN_ISNANF:
13594 case BUILT_IN_ISNANL:
13595 case BUILT_IN_ISNAND32:
13596 case BUILT_IN_ISNAND64:
13597 case BUILT_IN_ISNAND128:
13598 case BUILT_IN_ISNORMAL:
13599 case BUILT_IN_ISGREATER:
13600 case BUILT_IN_ISGREATEREQUAL:
13601 case BUILT_IN_ISLESS:
13602 case BUILT_IN_ISLESSEQUAL:
13603 case BUILT_IN_ISLESSGREATER:
13604 case BUILT_IN_ISUNORDERED:
13605 case BUILT_IN_VA_ARG_PACK:
13606 case BUILT_IN_VA_ARG_PACK_LEN:
13607 case BUILT_IN_VA_COPY:
13608 case BUILT_IN_TRAP:
13609 case BUILT_IN_SAVEREGS:
13610 case BUILT_IN_POPCOUNTL:
13611 case BUILT_IN_POPCOUNTLL:
13612 case BUILT_IN_POPCOUNTIMAX:
13613 case BUILT_IN_POPCOUNT:
13614 case BUILT_IN_PARITYL:
13615 case BUILT_IN_PARITYLL:
13616 case BUILT_IN_PARITYIMAX:
13617 case BUILT_IN_PARITY:
13618 case BUILT_IN_LABS:
13619 case BUILT_IN_LLABS:
13620 case BUILT_IN_PREFETCH:
13621 return true;
13623 default:
13624 return is_simple_builtin (decl);
13627 return false;