Merge from mainline (167278:168000).
[official-gcc/graphite-test-results.git] / gcc / builtins.c
blobeb2aa3e6b34300d37cea3ee4862d3a5811819cfc
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
/* Fallback definitions for target macros the backend may not provide.  */
/* If the target does not say how costly unaligned accesses are, assume
   they are slow exactly when the target requires strict alignment.  */
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
/* Default padding direction for varargs: pad downward on big-endian
   targets unless the target overrides this.  */
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
/* Per-target builtin state.  When SWITCHABLE_TARGET is enabled,
   THIS_TARGET_BUILTINS points at the state for the currently active
   target; otherwise only the default instance exists.  */
63 struct target_builtins default_target_builtins;
64 #if SWITCHABLE_TARGET
65 struct target_builtins *this_target_builtins = &default_target_builtins;
66 #endif
68 /* Define the names of the builtin function types and codes. */
69 const char *const built_in_class_names[4]
70 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
/* DEF_BUILTIN stringizes the enum name X; expanding builtins.def under
   this definition fills built_in_names with one string per builtin,
   indexed by enum built_in_function.  */
72 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
73 const char * built_in_names[(int) END_BUILTINS] =
75 #include "builtins.def"
77 #undef DEF_BUILTIN
79 /* Setup an array of _DECL trees, make sure each element is
80 initialized to NULL_TREE. */
81 tree built_in_decls[(int) END_BUILTINS];
82 /* Declarations used when constructing the builtin implicitly in the compiler.
83 It may be NULL_TREE when this is invalid (for instance runtime is not
84 required to implement the function call in all cases). */
85 tree implicit_built_in_decls[(int) END_BUILTINS];
87 static const char *c_getstr (tree);
88 static rtx c_readstr (const char *, enum machine_mode);
89 static int target_char_cast (tree, char *);
90 static rtx get_memory_rtx (tree, tree);
91 static int apply_args_size (void);
92 static int apply_result_size (void);
93 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
94 static rtx result_vector (int, rtx);
95 #endif
96 static void expand_builtin_update_setjmp_buf (rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static void expand_errno_check (tree, rtx);
105 static rtx expand_builtin_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_strcmp (tree, rtx);
120 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
121 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
122 static rtx expand_builtin_memcpy (tree, rtx);
123 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
124 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
125 enum machine_mode, int);
126 static rtx expand_builtin_strcpy (tree, rtx);
127 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
128 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_strncpy (tree, rtx);
130 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
131 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_alloca (tree, bool);
136 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_expect (location_t, tree, tree);
142 static tree fold_builtin_classify_type (tree);
143 static tree fold_builtin_strlen (location_t, tree, tree);
144 static tree fold_builtin_inf (location_t, tree, int);
145 static tree fold_builtin_nan (tree, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static bool integer_valued_real_p (tree);
149 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
150 static bool readonly_data_expr (tree);
151 static rtx expand_builtin_fabs (tree, rtx, rtx);
152 static rtx expand_builtin_signbit (tree, rtx);
153 static tree fold_builtin_sqrt (location_t, tree, tree);
154 static tree fold_builtin_cbrt (location_t, tree, tree);
155 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
156 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
157 static tree fold_builtin_cos (location_t, tree, tree, tree);
158 static tree fold_builtin_cosh (location_t, tree, tree, tree);
159 static tree fold_builtin_tan (tree, tree);
160 static tree fold_builtin_trunc (location_t, tree, tree);
161 static tree fold_builtin_floor (location_t, tree, tree);
162 static tree fold_builtin_ceil (location_t, tree, tree);
163 static tree fold_builtin_round (location_t, tree, tree);
164 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
165 static tree fold_builtin_bitop (tree, tree);
166 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
167 static tree fold_builtin_strchr (location_t, tree, tree, tree);
168 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
169 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
170 static tree fold_builtin_strcmp (location_t, tree, tree);
171 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
172 static tree fold_builtin_signbit (location_t, tree, tree);
173 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
174 static tree fold_builtin_isascii (location_t, tree);
175 static tree fold_builtin_toascii (location_t, tree);
176 static tree fold_builtin_isdigit (location_t, tree);
177 static tree fold_builtin_fabs (location_t, tree, tree);
178 static tree fold_builtin_abs (location_t, tree, tree);
179 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
180 enum tree_code);
181 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
182 static tree fold_builtin_0 (location_t, tree, bool);
183 static tree fold_builtin_1 (location_t, tree, tree, bool);
184 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
185 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
186 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
187 static tree fold_builtin_varargs (location_t, tree, tree, bool);
189 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
190 static tree fold_builtin_strstr (location_t, tree, tree, tree);
191 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
192 static tree fold_builtin_strcat (location_t, tree, tree);
193 static tree fold_builtin_strncat (location_t, tree, tree, tree);
194 static tree fold_builtin_strspn (location_t, tree, tree);
195 static tree fold_builtin_strcspn (location_t, tree, tree);
196 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
198 static rtx expand_builtin_object_size (tree);
199 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
200 enum built_in_function);
201 static void maybe_emit_chk_warning (tree, enum built_in_function);
202 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
203 static void maybe_emit_free_warning (tree);
204 static tree fold_builtin_object_size (tree, tree);
205 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
206 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
207 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
208 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
209 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
210 enum built_in_function);
211 static bool init_target_chars (void);
213 static unsigned HOST_WIDE_INT target_newline;
214 static unsigned HOST_WIDE_INT target_percent;
215 static unsigned HOST_WIDE_INT target_c;
216 static unsigned HOST_WIDE_INT target_s;
217 static char target_percent_c[3];
218 static char target_percent_s[3];
219 static char target_percent_s_newline[4];
220 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
222 static tree do_mpfr_arg2 (tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_arg3 (tree, tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
226 static tree do_mpfr_sincos (tree, tree, tree);
227 static tree do_mpfr_bessel_n (tree, tree, tree,
228 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
229 const REAL_VALUE_TYPE *, bool);
230 static tree do_mpfr_remquo (tree, tree, tree);
231 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME carries one of the reserved builtin prefixes,
   i.e. it starts with "__builtin_" or "__sync_".  */

bool
is_builtin_name (const char *name)
{
  static const char *const builtin_prefixes[] = { "__builtin_", "__sync_" };
  size_t i;

  for (i = 0; i < sizeof builtin_prefixes / sizeof builtin_prefixes[0]; i++)
    {
      const char *prefix = builtin_prefixes[i];
      if (strncmp (name, prefix, strlen (prefix)) == 0)
	return true;
    }
  return false;
}
246 /* Return true if DECL is a function symbol representing a built-in. */
248 bool
249 is_builtin_fn (tree decl)
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
255 /* Return true if NODE should be considered for inline expansion regardless
256 of the optimization level. This means whenever a function is invoked with
257 its "internal" name, which normally contains the prefix "__builtin". */
259 static bool
260 called_as_built_in (tree node)
262 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
263 we want the name used to call the function, not the name it
264 will have. */
265 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
266 return is_builtin_name (name);
269 /* Return the alignment in bits of EXP, an object.
270 Don't return more than MAX_ALIGN no matter what. */
272 unsigned int
273 get_object_alignment (tree exp, unsigned int max_align)
275 HOST_WIDE_INT bitsize, bitpos;
276 tree offset;
277 enum machine_mode mode;
278 int unsignedp, volatilep;
279 unsigned int align, inner;
281 /* Get the innermost object and the constant (bitpos) and possibly
282 variable (offset) offset of the access. */
283 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
284 &mode, &unsignedp, &volatilep, true);
286 /* Extract alignment information from the innermost object and
287 possibly adjust bitpos and offset. */
288 if (TREE_CODE (exp) == CONST_DECL)
289 exp = DECL_INITIAL (exp);
290 if (DECL_P (exp)
291 && TREE_CODE (exp) != LABEL_DECL)
292 align = DECL_ALIGN (exp);
293 else if (CONSTANT_CLASS_P (exp))
295 align = TYPE_ALIGN (TREE_TYPE (exp));
296 #ifdef CONSTANT_ALIGNMENT
297 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
298 #endif
300 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
301 align = TYPE_ALIGN (TREE_TYPE (exp));
302 else if (TREE_CODE (exp) == INDIRECT_REF)
303 align = TYPE_ALIGN (TREE_TYPE (exp));
304 else if (TREE_CODE (exp) == MEM_REF)
306 tree addr = TREE_OPERAND (exp, 0);
307 struct ptr_info_def *pi;
/* A (addr & constant-mask) address: the least significant set bit of
   the mask bounds the bytes of alignment the masking guarantees.  */
308 if (TREE_CODE (addr) == BIT_AND_EXPR
309 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
311 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
312 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
313 align *= BITS_PER_UNIT;
314 addr = TREE_OPERAND (addr, 0);
316 else
317 align = BITS_PER_UNIT;
/* Fold in alignment/misalignment recorded on the SSA pointer, if any.  */
318 if (TREE_CODE (addr) == SSA_NAME
319 && (pi = SSA_NAME_PTR_INFO (addr)))
321 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
322 align = MAX (pi->align * BITS_PER_UNIT, align);
324 else if (TREE_CODE (addr) == ADDR_EXPR)
325 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
326 max_align));
327 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
/* TARGET_MEM_REF: same address analysis as MEM_REF, then account for
   the extra offset/index/step operands of the TARGET_MEM_REF form.  */
329 else if (TREE_CODE (exp) == TARGET_MEM_REF)
331 struct ptr_info_def *pi;
332 tree addr = TMR_BASE (exp);
333 if (TREE_CODE (addr) == BIT_AND_EXPR
334 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
336 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
337 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
338 align *= BITS_PER_UNIT;
339 addr = TREE_OPERAND (addr, 0);
341 else
342 align = BITS_PER_UNIT;
343 if (TREE_CODE (addr) == SSA_NAME
344 && (pi = SSA_NAME_PTR_INFO (addr)))
346 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
347 align = MAX (pi->align * BITS_PER_UNIT, align);
349 else if (TREE_CODE (addr) == ADDR_EXPR)
350 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0),
351 max_align));
352 if (TMR_OFFSET (exp))
353 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
/* A variable index with a known constant step can only guarantee the
   alignment implied by the step's least significant set bit.  */
354 if (TMR_INDEX (exp) && TMR_STEP (exp))
356 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
357 align = MIN (align, (step & -step) * BITS_PER_UNIT);
359 else if (TMR_INDEX (exp))
360 align = BITS_PER_UNIT;
361 if (TMR_INDEX2 (exp))
362 align = BITS_PER_UNIT;
364 else
365 align = BITS_PER_UNIT;
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
369 inner = max_align;
370 while (offset)
372 tree next_offset;
374 if (TREE_CODE (offset) == PLUS_EXPR)
376 next_offset = TREE_OPERAND (offset, 0);
377 offset = TREE_OPERAND (offset, 1);
379 else
380 next_offset = NULL;
381 if (host_integerp (offset, 1))
383 /* Any overflow in calculating offset_bits won't change
384 the alignment. */
385 unsigned offset_bits
386 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
388 if (offset_bits)
389 inner = MIN (inner, (offset_bits & -offset_bits));
391 else if (TREE_CODE (offset) == MULT_EXPR
392 && host_integerp (TREE_OPERAND (offset, 1), 1))
394 /* Any overflow in calculating offset_factor won't change
395 the alignment. */
396 unsigned offset_factor
397 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
398 * BITS_PER_UNIT);
400 if (offset_factor)
401 inner = MIN (inner, (offset_factor & -offset_factor))
/* A completely unknown offset term: only byte alignment survives.  */
403 else
405 inner = MIN (inner, BITS_PER_UNIT);
406 break;
408 offset = next_offset;
411 /* Alignment is innermost object alignment adjusted by the constant
412 and non-constant offset parts. */
413 align = MIN (align, inner);
414 bitpos = bitpos & (align - 1);
416 /* align and bitpos now specify known low bits of the pointer.
417 ptr & (align - 1) == bitpos. */
419 if (bitpos != 0)
420 align = (bitpos & -bitpos);
422 return MIN (align, max_align);
425 /* Returns true iff we can trust that alignment information has been
426 calculated properly. */
428 bool
429 can_trust_pointer_alignment (void)
431 /* We rely on TER to compute accurate alignment information. */
432 return (optimize && flag_tree_ter);
435 /* Return the alignment in bits of EXP, a pointer valued expression.
436 But don't return more than MAX_ALIGN no matter what.
437 The alignment returned is, by default, the alignment of the thing that
438 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
440 Otherwise, look at the expression to see if we can do better, i.e., if the
441 expression is actually pointing at an object whose alignment is tighter. */
443 unsigned int
444 get_pointer_alignment (tree exp, unsigned int max_align)
446 STRIP_NOPS (exp);
448 if (TREE_CODE (exp) == ADDR_EXPR)
449 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
450 else if (TREE_CODE (exp) == SSA_NAME
451 && POINTER_TYPE_P (TREE_TYPE (exp)))
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454 unsigned align;
455 if (!pi)
456 return BITS_PER_UNIT;
457 if (pi->misalign != 0)
458 align = (pi->misalign & -pi->misalign);
459 else
460 align = pi->align;
461 return MIN (max_align, align * BITS_PER_UNIT);
464 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
467 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
468 way, because it could contain a zero byte in the middle.
469 TREE_STRING_LENGTH is the size of the character array, not the string.
471 ONLY_VALUE should be nonzero if the result is not going to be emitted
472 into the instruction stream and zero if it is going to be expanded.
473 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
474 is returned, otherwise NULL, since
475 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
476 evaluate the side-effects.
478 The value returned is of type `ssizetype'.
480 Unfortunately, string_constant can't access the values of const char
481 arrays with initializers, so neither can we do so here. */
483 tree
484 c_strlen (tree src, int only_value)
486 tree offset_node;
487 HOST_WIDE_INT offset;
488 int max;
489 const char *ptr;
490 location_t loc;
492 STRIP_NOPS (src);
493 if (TREE_CODE (src) == COND_EXPR
494 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
496 tree len1, len2;
498 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
499 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
500 if (tree_int_cst_equal (len1, len2))
501 return len1;
504 if (TREE_CODE (src) == COMPOUND_EXPR
505 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
506 return c_strlen (TREE_OPERAND (src, 1), only_value);
508 loc = EXPR_LOC_OR_HERE (src);
510 src = string_constant (src, &offset_node);
511 if (src == 0)
512 return NULL_TREE;
514 max = TREE_STRING_LENGTH (src) - 1;
515 ptr = TREE_STRING_POINTER (src);
517 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
519 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
520 compute the offset to the following null if we don't know where to
521 start searching for it. */
522 int i;
524 for (i = 0; i < max; i++)
525 if (ptr[i] == 0)
526 return NULL_TREE;
528 /* We don't know the starting offset, but we do know that the string
529 has no internal zero bytes. We can assume that the offset falls
530 within the bounds of the string; otherwise, the programmer deserves
531 what he gets. Subtract the offset from the length of the string,
532 and return that. This would perhaps not be valid if we were dealing
533 with named arrays in addition to literal string constants. */
535 return size_diffop_loc (loc, size_int (max), offset_node);
538 /* We have a known offset into the string. Start searching there for
539 a null character if we can represent it as a single HOST_WIDE_INT. */
540 if (offset_node == 0)
541 offset = 0;
542 else if (! host_integerp (offset_node, 0))
543 offset = -1;
544 else
545 offset = tree_low_cst (offset_node, 0);
547 /* If the offset is known to be out of bounds, warn, and call strlen at
548 runtime. */
549 if (offset < 0 || offset > max)
551 /* Suppress multiple warnings for propagated constant strings. */
552 if (! TREE_NO_WARNING (src))
554 warning_at (loc, 0, "offset outside bounds of constant string");
555 TREE_NO_WARNING (src) = 1;
557 return NULL_TREE;
560 /* Use strlen to search for the first zero byte. Since any strings
561 constructed with build_string will have nulls appended, we win even
562 if we get handed something like (char[4])"abcd".
564 Since OFFSET is our starting index into the string, no further
565 calculation is needed. */
566 return ssize_int (strlen (ptr + offset));
569 /* Return a char pointer for a C string if it is a string constant
570 or sum of string constant and integer constant. */
572 static const char *
573 c_getstr (tree src)
575 tree offset_node;
577 src = string_constant (src, &offset_node);
578 if (src == 0)
579 return 0;
581 if (offset_node == 0)
582 return TREE_STRING_POINTER (src);
583 else if (!host_integerp (offset_node, 1)
584 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
585 return 0;
587 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
590 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
591 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
593 static rtx
594 c_readstr (const char *str, enum machine_mode mode)
596 HOST_WIDE_INT c[2];
597 HOST_WIDE_INT ch;
598 unsigned int i, j;
600 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
602 c[0] = 0;
603 c[1] = 0;
/* CH doubles as the current byte and a "still inside the string" flag:
   it starts nonzero and, once a NUL is read, stays zero so every byte
   after the terminator is packed as zero.  */
604 ch = 1;
/* Map each host byte index I to target bit position J, correcting for
   word order and, when they differ, for byte order within words.  */
605 for (i = 0; i < GET_MODE_SIZE (mode); i++)
607 j = i;
608 if (WORDS_BIG_ENDIAN)
609 j = GET_MODE_SIZE (mode) - i - 1;
610 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
611 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
612 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
613 j *= BITS_PER_UNIT;
614 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
616 if (ch)
617 ch = (unsigned char) str[i];
618 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
620 return immed_double_const (c[0], c[1], mode);
623 /* Cast a target constant CST to target CHAR and if that value fits into
624 host char type, return zero and put that value into variable pointed to by
625 P. */
627 static int
628 target_char_cast (tree cst, char *p)
630 unsigned HOST_WIDE_INT val, hostval;
632 if (TREE_CODE (cst) != INTEGER_CST
633 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
634 return 1;
636 val = TREE_INT_CST_LOW (cst);
637 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
638 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
640 hostval = val;
641 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
642 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
644 if (val != hostval)
645 return 1;
647 *p = hostval;
648 return 0;
651 /* Similar to save_expr, but assumes that arbitrary code is not executed
652 in between the multiple evaluations. In particular, we assume that a
653 non-addressable local variable will not be modified. */
655 static tree
656 builtin_save_expr (tree exp)
658 if (TREE_ADDRESSABLE (exp) == 0
659 && (TREE_CODE (exp) == PARM_DECL
660 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
661 return exp;
663 return save_expr (exp);
666 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
667 times to get the address of either a higher stack frame, or a return
668 address located within it (depending on FNDECL_CODE). */
670 static rtx
671 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
673 int i;
/* Targets may supply the starting frame address directly; otherwise
   choose between the soft and hard frame pointer below.  */
675 #ifdef INITIAL_FRAME_ADDRESS_RTX
676 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
677 #else
678 rtx tem;
680 /* For a zero count with __builtin_return_address, we don't care what
681 frame address we return, because target-specific definitions will
682 override us. Therefore frame pointer elimination is OK, and using
683 the soft frame pointer is OK.
685 For a nonzero count, or a zero count with __builtin_frame_address,
686 we require a stable offset from the current frame pointer to the
687 previous one, so we must use the hard frame pointer, and
688 we must disable frame pointer elimination. */
689 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
690 tem = frame_pointer_rtx;
691 else
693 tem = hard_frame_pointer_rtx;
695 /* Tell reload not to eliminate the frame pointer. */
696 crtl->accesses_prior_frames = 1;
698 #endif
700 /* Some machines need special handling before we can access
701 arbitrary frames. For example, on the SPARC, we must first flush
702 all register windows to the stack. */
703 #ifdef SETUP_FRAME_ADDRESSES
704 if (count > 0)
705 SETUP_FRAME_ADDRESSES ();
706 #endif
708 /* On the SPARC, the return address is not in the frame, it is in a
709 register. There is no way to access it off of the current frame
710 pointer, but it can be accessed off the previous frame pointer by
711 reading the value from the register window save area. */
712 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
713 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
714 count--;
715 #endif
717 /* Scan back COUNT frames to the specified frame. */
718 for (i = 0; i < count; i++)
720 /* Assume the dynamic chain pointer is in the word that the
721 frame address points to, unless otherwise specified. */
722 #ifdef DYNAMIC_CHAIN_ADDRESS
723 tem = DYNAMIC_CHAIN_ADDRESS (tem);
724 #endif
725 tem = memory_address (Pmode, tem);
726 tem = gen_frame_mem (Pmode, tem);
727 tem = copy_to_reg (tem);
730 /* For __builtin_frame_address, return what we've got. But, on
731 the SPARC for example, we may have to add a bias. */
732 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
733 #ifdef FRAME_ADDR_RTX
734 return FRAME_ADDR_RTX (tem);
735 #else
736 return tem;
737 #endif
739 /* For __builtin_return_address, get the return address from that frame. */
/* Default: the return address lives one Pmode word past the frame
   address, unless the target provides RETURN_ADDR_RTX.  */
740 #ifdef RETURN_ADDR_RTX
741 tem = RETURN_ADDR_RTX (count, tem);
742 #else
743 tem = memory_address (Pmode,
744 plus_constant (tem, GET_MODE_SIZE (Pmode)));
745 tem = gen_frame_mem (Pmode, tem);
746 #endif
747 return tem;
750 /* Alias set used for setjmp buffer. */
/* Lazily allocated: -1 means no alias set has been assigned yet; the
   setjmp/longjmp expanders call new_alias_set on first use.  */
751 static alias_set_type setjmp_alias_set = -1;
753 /* Construct the leading half of a __builtin_setjmp call. Control will
754 return to RECEIVER_LABEL. This is also called directly by the SJLJ
755 exception handling code. */
757 void
758 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
760 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
761 rtx stack_save;
762 rtx mem;
764 if (setjmp_alias_set == -1)
765 setjmp_alias_set = new_alias_set ();
767 buf_addr = convert_memory_address (Pmode, buf_addr);
769 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
771 /* We store the frame pointer and the address of receiver_label in
772 the buffer and use the rest of it for the stack save area, which
773 is machine-dependent. */
775 mem = gen_rtx_MEM (Pmode, buf_addr);
776 set_mem_alias_set (mem, setjmp_alias_set);
777 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
779 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
780 set_mem_alias_set (mem, setjmp_alias_set);
782 emit_move_insn (validize_mem (mem),
783 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
785 stack_save = gen_rtx_MEM (sa_mode,
786 plus_constant (buf_addr,
787 2 * GET_MODE_SIZE (Pmode)));
788 set_mem_alias_set (stack_save, setjmp_alias_set);
789 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
791 /* If there is further processing to do, do it. */
792 #ifdef HAVE_builtin_setjmp_setup
793 if (HAVE_builtin_setjmp_setup)
794 emit_insn (gen_builtin_setjmp_setup (buf_addr));
795 #endif
797 /* Tell optimize_save_area_alloca that extra work is going to
798 need to go on during alloca. */
799 cfun->calls_setjmp = 1;
801 /* We have a nonlocal label. */
802 cfun->has_nonlocal_label = 1;
805 /* Construct the trailing part of a __builtin_setjmp call. This is
806 also called directly by the SJLJ exception handling code. */
808 void
809 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
811 rtx chain;
813 /* Clobber the FP when we get here, so we have to make sure it's
814 marked as used by this function. */
815 emit_use (hard_frame_pointer_rtx);
817 /* Mark the static chain as clobbered here so life information
818 doesn't get messed up for it. */
819 chain = targetm.calls.static_chain (current_function_decl, true);
820 if (chain && REG_P (chain))
821 emit_clobber (chain);
823 /* Now put in the code to restore the frame pointer, and argument
824 pointer, if needed. */
/* When the target has a nonlocal_goto pattern it restores the frame
   pointer itself, so only emit the restore when it is absent (or the
   #ifdef makes the following block unconditional).  */
825 #ifdef HAVE_nonlocal_goto
826 if (! HAVE_nonlocal_goto)
827 #endif
829 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
830 /* This might change the hard frame pointer in ways that aren't
831 apparent to early optimization passes, so force a clobber. */
832 emit_clobber (hard_frame_pointer_rtx);
/* Restore the argument pointer only when it is a fixed register that
   is not eliminated to the hard frame pointer.  */
835 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
836 if (fixed_regs[ARG_POINTER_REGNUM])
838 #ifdef ELIMINABLE_REGS
839 size_t i;
840 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
842 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
843 if (elim_regs[i].from == ARG_POINTER_REGNUM
844 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
845 break;
847 if (i == ARRAY_SIZE (elim_regs))
848 #endif
850 /* Now restore our arg pointer from the address at which it
851 was saved in our stack frame. */
852 emit_move_insn (crtl->args.internal_arg_pointer,
853 copy_to_reg (get_arg_pointer_save_area ()));
856 #endif
/* Give the target a chance to emit receiver-side fixup code; the empty
   block at the end soaks up the dangling "else" left by whichever of
   the two #ifdef'd if/else chains is compiled in.  */
858 #ifdef HAVE_builtin_setjmp_receiver
859 if (HAVE_builtin_setjmp_receiver)
860 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
861 else
862 #endif
863 #ifdef HAVE_nonlocal_goto_receiver
864 if (HAVE_nonlocal_goto_receiver)
865 emit_insn (gen_nonlocal_goto_receiver ());
866 else
867 #endif
868 { /* Nothing */ }
870 /* We must not allow the code we just generated to be reordered by
871 scheduling. Specifically, the update of the frame pointer must
872 happen immediately, not later. */
873 emit_insn (gen_blockage ());
876 /* __builtin_longjmp is passed a pointer to an array of five words (not
877 all will be used on all machines). It operates similarly to the C
878 library function of the same name, but is more efficient. Much of
879 the code below is copied from the handling of non-local gotos. */
881 static void
882 expand_builtin_longjmp (rtx buf_addr, rtx value)
884 rtx fp, lab, stack, insn, last;
885 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
887 /* DRAP is needed for stack realign if longjmp is expanded to current
888 function */
889 if (SUPPORTS_STACK_ALIGNMENT)
890 crtl->need_drap = true;
892 if (setjmp_alias_set == -1)
893 setjmp_alias_set = new_alias_set ();
895 buf_addr = convert_memory_address (Pmode, buf_addr);
897 buf_addr = force_reg (Pmode, buf_addr);
899 /* We require that the user must pass a second argument of 1, because
900 that is what builtin_setjmp will return. */
901 gcc_assert (value == const1_rtx);
/* Remember the last insn before expansion so the marking loop below
   knows where the newly emitted sequence starts.  */
903 last = get_last_insn ();
904 #ifdef HAVE_builtin_longjmp
905 if (HAVE_builtin_longjmp)
906 emit_insn (gen_builtin_longjmp (buf_addr));
907 else
908 #endif
/* Buffer layout mirrors expand_builtin_setjmp_setup: frame pointer,
   then receiver label, then the stack save area.  */
910 fp = gen_rtx_MEM (Pmode, buf_addr);
911 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
912 GET_MODE_SIZE (Pmode)));
914 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
915 2 * GET_MODE_SIZE (Pmode)));
916 set_mem_alias_set (fp, setjmp_alias_set);
917 set_mem_alias_set (lab, setjmp_alias_set);
918 set_mem_alias_set (stack, setjmp_alias_set);
920 /* Pick up FP, label, and SP from the block and jump. This code is
921 from expand_goto in stmt.c; see there for detailed comments. */
922 #ifdef HAVE_nonlocal_goto
923 if (HAVE_nonlocal_goto)
924 /* We have to pass a value to the nonlocal_goto pattern that will
925 get copied into the static_chain pointer, but it does not matter
926 what that value is, because builtin_setjmp does not use it. */
927 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
928 else
929 #endif
931 lab = copy_to_reg (lab);
933 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
934 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
936 emit_move_insn (hard_frame_pointer_rtx, fp);
937 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
939 emit_use (hard_frame_pointer_rtx);
940 emit_use (stack_pointer_rtx);
941 emit_indirect_jump (lab);
945 /* Search backwards and mark the jump insn as a non-local goto.
946 Note that this precludes the use of __builtin_longjmp to a
947 __builtin_setjmp target in the same function. However, we've
948 already cautioned the user that these functions are for
949 internal exception handling use only. */
950 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
952 gcc_assert (insn != last);
954 if (JUMP_P (insn))
956 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
957 break;
959 else if (CALL_P (insn))
960 break;
964 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
965 and the address of the save area. */
967 static rtx
968 expand_builtin_nonlocal_goto (tree exp)
970 tree t_label, t_save_area;
971 rtx r_label, r_save_area, r_fp, r_sp, insn;
973 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
974 return NULL_RTX;
976 t_label = CALL_EXPR_ARG (exp, 0);
977 t_save_area = CALL_EXPR_ARG (exp, 1);
979 r_label = expand_normal (t_label);
980 r_label = convert_memory_address (Pmode, r_label);
981 r_save_area = expand_normal (t_save_area);
982 r_save_area = convert_memory_address (Pmode, r_save_area);
983 /* Copy the address of the save location to a register just in case it was based
984 on the frame pointer. */
985 r_save_area = copy_to_reg (r_save_area);
986 r_fp = gen_rtx_MEM (Pmode, r_save_area);
987 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
988 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
990 crtl->has_nonlocal_goto = 1;
992 #ifdef HAVE_nonlocal_goto
993 /* ??? We no longer need to pass the static chain value, afaik. */
994 if (HAVE_nonlocal_goto)
995 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
996 else
997 #endif
999 r_label = copy_to_reg (r_label);
1001 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1002 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1004 /* Restore frame pointer for containing function.
1005 This sets the actual hard register used for the frame pointer
1006 to the location of the function's incoming static chain info.
1007 The non-local goto handler will then adjust it to contain the
1008 proper value and reload the argument pointer, if needed. */
1009 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1010 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
1012 /* USE of hard_frame_pointer_rtx added for consistency;
1013 not clear if really needed. */
1014 emit_use (hard_frame_pointer_rtx);
1015 emit_use (stack_pointer_rtx);
1017 /* If the architecture is using a GP register, we must
1018 conservatively assume that the target function makes use of it.
1019 The prologue of functions with nonlocal gotos must therefore
1020 initialize the GP register to the appropriate value, and we
1021 must then make sure that this value is live at the point
1022 of the jump. (Note that this doesn't necessarily apply
1023 to targets with a nonlocal_goto pattern; they are free
1024 to implement it in their own way. Note also that this is
1025 a no-op if the GP register is a global invariant.) */
1026 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1027 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1028 emit_use (pic_offset_table_rtx);
1030 emit_indirect_jump (r_label);
1033 /* Search backwards to the jump insn and mark it as a
1034 non-local goto. */
1035 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1037 if (JUMP_P (insn))
1039 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1040 break;
1042 else if (CALL_P (insn))
1043 break;
1046 return const0_rtx;
1049 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1050 (not all will be used on all machines) that was passed to __builtin_setjmp.
1051 It updates the stack pointer in that block to correspond to the current
1052 stack pointer. */
1054 static void
1055 expand_builtin_update_setjmp_buf (rtx buf_addr)
1057 enum machine_mode sa_mode = Pmode;
1058 rtx stack_save;
1061 #ifdef HAVE_save_stack_nonlocal
1062 if (HAVE_save_stack_nonlocal)
1063 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1064 #endif
1065 #ifdef STACK_SAVEAREA_MODE
1066 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1067 #endif
1069 stack_save
1070 = gen_rtx_MEM (sa_mode,
1071 memory_address
1072 (sa_mode,
1073 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1075 #ifdef HAVE_setjmp
1076 if (HAVE_setjmp)
1077 emit_insn (gen_setjmp ());
1078 #endif
1080 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1083 /* Expand a call to __builtin_prefetch. For a target that does not support
1084 data prefetch, evaluate the memory address argument in case it has side
1085 effects. */
1087 static void
1088 expand_builtin_prefetch (tree exp)
1090 tree arg0, arg1, arg2;
1091 int nargs;
1092 rtx op0, op1, op2;
1094 if (!validate_arglist (exp, POINTER_TYPE, 0))
1095 return;
1097 arg0 = CALL_EXPR_ARG (exp, 0);
1099 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1100 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1101 locality). */
1102 nargs = call_expr_nargs (exp);
1103 if (nargs > 1)
1104 arg1 = CALL_EXPR_ARG (exp, 1);
1105 else
1106 arg1 = integer_zero_node;
1107 if (nargs > 2)
1108 arg2 = CALL_EXPR_ARG (exp, 2);
1109 else
1110 arg2 = integer_three_node;
1112 /* Argument 0 is an address. */
1113 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1115 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1116 if (TREE_CODE (arg1) != INTEGER_CST)
1118 error ("second argument to %<__builtin_prefetch%> must be a constant");
1119 arg1 = integer_zero_node;
1121 op1 = expand_normal (arg1);
1122 /* Argument 1 must be either zero or one. */
1123 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1125 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1126 " using zero");
1127 op1 = const0_rtx;
1130 /* Argument 2 (locality) must be a compile-time constant int. */
1131 if (TREE_CODE (arg2) != INTEGER_CST)
1133 error ("third argument to %<__builtin_prefetch%> must be a constant");
1134 arg2 = integer_zero_node;
1136 op2 = expand_normal (arg2);
1137 /* Argument 2 must be 0, 1, 2, or 3. */
1138 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1140 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1141 op2 = const0_rtx;
1144 #ifdef HAVE_prefetch
1145 if (HAVE_prefetch)
1147 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1148 (op0,
1149 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1150 || (GET_MODE (op0) != Pmode))
1152 op0 = convert_memory_address (Pmode, op0);
1153 op0 = force_reg (Pmode, op0);
1155 emit_insn (gen_prefetch (op0, op1, op2));
1157 #endif
1159 /* Don't do anything with direct references to volatile memory, but
1160 generate code to handle other side effects. */
1161 if (!MEM_P (op0) && side_effects_p (op0))
1162 emit_insn (op0);
1165 /* Get a MEM rtx for expression EXP which is the address of an operand
1166 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1167 the maximum length of the block of memory that might be accessed or
1168 NULL if unknown. */
1170 static rtx
1171 get_memory_rtx (tree exp, tree len)
1173 tree orig_exp = exp;
1174 rtx addr, mem;
1175 HOST_WIDE_INT off;
1177 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1178 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1179 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1180 exp = TREE_OPERAND (exp, 0);
1182 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1183 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1185 /* Get an expression we can use to find the attributes to assign to MEM.
1186 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1187 we can. First remove any nops. */
1188 while (CONVERT_EXPR_P (exp)
1189 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1190 exp = TREE_OPERAND (exp, 0);
1192 off = 0;
1193 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1194 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1195 && host_integerp (TREE_OPERAND (exp, 1), 0)
1196 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1197 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1198 else if (TREE_CODE (exp) == ADDR_EXPR)
1199 exp = TREE_OPERAND (exp, 0);
1200 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1201 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1202 else
1203 exp = NULL;
1205 /* Honor attributes derived from exp, except for the alias set
1206 (as builtin stringops may alias with anything) and the size
1207 (as stringops may access multiple array elements). */
1208 if (exp)
1210 set_mem_attributes (mem, exp, 0);
1212 if (off)
1213 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1215 /* Allow the string and memory builtins to overflow from one
1216 field into another, see http://gcc.gnu.org/PR23561.
1217 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1218 memory accessed by the string or memory builtin will fit
1219 within the field. */
1220 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1222 tree mem_expr = MEM_EXPR (mem);
1223 HOST_WIDE_INT offset = -1, length = -1;
1224 tree inner = exp;
1226 while (TREE_CODE (inner) == ARRAY_REF
1227 || CONVERT_EXPR_P (inner)
1228 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1229 || TREE_CODE (inner) == SAVE_EXPR)
1230 inner = TREE_OPERAND (inner, 0);
1232 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1234 if (MEM_OFFSET (mem)
1235 && CONST_INT_P (MEM_OFFSET (mem)))
1236 offset = INTVAL (MEM_OFFSET (mem));
1238 if (offset >= 0 && len && host_integerp (len, 0))
1239 length = tree_low_cst (len, 0);
1241 while (TREE_CODE (inner) == COMPONENT_REF)
1243 tree field = TREE_OPERAND (inner, 1);
1244 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1245 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1247 /* Bitfields are generally not byte-addressable. */
1248 gcc_assert (!DECL_BIT_FIELD (field)
1249 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1250 % BITS_PER_UNIT) == 0
1251 && host_integerp (DECL_SIZE (field), 0)
1252 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1253 % BITS_PER_UNIT) == 0));
1255 /* If we can prove that the memory starting at XEXP (mem, 0) and
1256 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1257 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1258 fields without DECL_SIZE_UNIT like flexible array members. */
1259 if (length >= 0
1260 && DECL_SIZE_UNIT (field)
1261 && host_integerp (DECL_SIZE_UNIT (field), 0))
1263 HOST_WIDE_INT size
1264 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1265 if (offset <= size
1266 && length <= size
1267 && offset + length <= size)
1268 break;
1271 if (offset >= 0
1272 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1273 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1274 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1275 / BITS_PER_UNIT;
1276 else
1278 offset = -1;
1279 length = -1;
1282 mem_expr = TREE_OPERAND (mem_expr, 0);
1283 inner = TREE_OPERAND (inner, 0);
1286 if (mem_expr == NULL)
1287 offset = -1;
1288 if (mem_expr != MEM_EXPR (mem))
1290 set_mem_expr (mem, mem_expr);
1291 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1294 set_mem_alias_set (mem, 0);
1295 set_mem_size (mem, NULL_RTX);
1298 return mem;
/* Built-in functions to perform an untyped call and return.  */

/* Per-target caches of the modes in which each hard register is saved
   by __builtin_apply_args / restored by __builtin_apply.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
1308 /* Return the size required for the block returned by __builtin_apply_args,
1309 and initialize apply_args_mode. */
1311 static int
1312 apply_args_size (void)
1314 static int size = -1;
1315 int align;
1316 unsigned int regno;
1317 enum machine_mode mode;
1319 /* The values computed by this function never change. */
1320 if (size < 0)
1322 /* The first value is the incoming arg-pointer. */
1323 size = GET_MODE_SIZE (Pmode);
1325 /* The second value is the structure value address unless this is
1326 passed as an "invisible" first argument. */
1327 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1328 size += GET_MODE_SIZE (Pmode);
1330 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1331 if (FUNCTION_ARG_REGNO_P (regno))
1333 mode = targetm.calls.get_raw_arg_mode (regno);
1335 gcc_assert (mode != VOIDmode);
1337 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1338 if (size % align != 0)
1339 size = CEIL (size, align) * align;
1340 size += GET_MODE_SIZE (mode);
1341 apply_args_mode[regno] = mode;
1343 else
1345 apply_args_mode[regno] = VOIDmode;
1348 return size;
1351 /* Return the size required for the block returned by __builtin_apply,
1352 and initialize apply_result_mode. */
1354 static int
1355 apply_result_size (void)
1357 static int size = -1;
1358 int align, regno;
1359 enum machine_mode mode;
1361 /* The values computed by this function never change. */
1362 if (size < 0)
1364 size = 0;
1366 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1367 if (targetm.calls.function_value_regno_p (regno))
1369 mode = targetm.calls.get_raw_result_mode (regno);
1371 gcc_assert (mode != VOIDmode);
1373 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1374 if (size % align != 0)
1375 size = CEIL (size, align) * align;
1376 size += GET_MODE_SIZE (mode);
1377 apply_result_mode[regno] = mode;
1379 else
1380 apply_result_mode[regno] = VOIDmode;
1382 /* Allow targets that use untyped_call and untyped_return to override
1383 the size so that machine-specific information can be stored here. */
1384 #ifdef APPLY_RESULT_SIZE
1385 size = APPLY_RESULT_SIZE;
1386 #endif
1388 return size;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1422 /* Save the state required to perform an untyped call with the same
1423 arguments as were passed to the current function. */
1425 static rtx
1426 expand_builtin_apply_args_1 (void)
1428 rtx registers, tem;
1429 int size, align, regno;
1430 enum machine_mode mode;
1431 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1433 /* Create a block where the arg-pointer, structure value address,
1434 and argument registers can be saved. */
1435 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1437 /* Walk past the arg-pointer and structure value address. */
1438 size = GET_MODE_SIZE (Pmode);
1439 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1440 size += GET_MODE_SIZE (Pmode);
1442 /* Save each register used in calling a function to the block. */
1443 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1444 if ((mode = apply_args_mode[regno]) != VOIDmode)
1446 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1447 if (size % align != 0)
1448 size = CEIL (size, align) * align;
1450 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1452 emit_move_insn (adjust_address (registers, mode, size), tem);
1453 size += GET_MODE_SIZE (mode);
1456 /* Save the arg pointer to the block. */
1457 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1458 #ifdef STACK_GROWS_DOWNWARD
1459 /* We need the pointer as the caller actually passed them to us, not
1460 as we might have pretended they were passed. Make sure it's a valid
1461 operand, as emit_move_insn isn't expected to handle a PLUS. */
1463 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1464 NULL_RTX);
1465 #endif
1466 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1468 size = GET_MODE_SIZE (Pmode);
1470 /* Save the structure value address unless this is passed as an
1471 "invisible" first argument. */
1472 if (struct_incoming_value)
1474 emit_move_insn (adjust_address (registers, Pmode, size),
1475 copy_to_reg (struct_incoming_value));
1476 size += GET_MODE_SIZE (Pmode);
1479 /* Return the address of the block. */
1480 return copy_addr_to_reg (XEXP (registers, 0));
1483 /* __builtin_apply_args returns block of memory allocated on
1484 the stack into which is stored the arg pointer, structure
1485 value address, static chain, and all the registers that might
1486 possibly be used in performing a function call. The code is
1487 moved to the start of the function so the incoming values are
1488 saved. */
1490 static rtx
1491 expand_builtin_apply_args (void)
1493 /* Don't do __builtin_apply_args more than once in a function.
1494 Save the result of the first call and reuse it. */
1495 if (apply_args_value != 0)
1496 return apply_args_value;
1498 /* When this function is called, it means that registers must be
1499 saved on entry to this function. So we migrate the
1500 call to the first insn of this function. */
1501 rtx temp;
1502 rtx seq;
1504 start_sequence ();
1505 temp = expand_builtin_apply_args_1 ();
1506 seq = get_insns ();
1507 end_sequence ();
1509 apply_args_value = temp;
1511 /* Put the insns after the NOTE that starts the function.
1512 If this is inside a start_sequence, make the outer-level insn
1513 chain current, so the code is placed at the start of the
1514 function. If internal_arg_pointer is a non-virtual pseudo,
1515 it needs to be placed after the function that initializes
1516 that pseudo. */
1517 push_topmost_sequence ();
1518 if (REG_P (crtl->args.internal_arg_pointer)
1519 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1520 emit_insn_before (seq, parm_birth_insn);
1521 else
1522 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1523 pop_topmost_sequence ();
1524 return temp;
1528 /* Perform an untyped call and save the state required to perform an
1529 untyped return of whatever value was returned by the given function. */
1531 static rtx
1532 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1534 int size, align, regno;
1535 enum machine_mode mode;
1536 rtx incoming_args, result, reg, dest, src, call_insn;
1537 rtx old_stack_level = 0;
1538 rtx call_fusage = 0;
1539 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1541 arguments = convert_memory_address (Pmode, arguments);
1543 /* Create a block where the return registers can be saved. */
1544 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1546 /* Fetch the arg pointer from the ARGUMENTS block. */
1547 incoming_args = gen_reg_rtx (Pmode);
1548 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1549 #ifndef STACK_GROWS_DOWNWARD
1550 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1551 incoming_args, 0, OPTAB_LIB_WIDEN);
1552 #endif
1554 /* Push a new argument block and copy the arguments. Do not allow
1555 the (potential) memcpy call below to interfere with our stack
1556 manipulations. */
1557 do_pending_stack_adjust ();
1558 NO_DEFER_POP;
1560 /* Save the stack with nonlocal if available. */
1561 #ifdef HAVE_save_stack_nonlocal
1562 if (HAVE_save_stack_nonlocal)
1563 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1564 else
1565 #endif
1566 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1568 /* Allocate a block of memory onto the stack and copy the memory
1569 arguments to the outgoing arguments address. We can pass TRUE
1570 as the 4th argument because we just saved the stack pointer
1571 and will restore it right after the call. */
1572 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1574 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1575 may have already set current_function_calls_alloca to true.
1576 current_function_calls_alloca won't be set if argsize is zero,
1577 so we have to guarantee need_drap is true here. */
1578 if (SUPPORTS_STACK_ALIGNMENT)
1579 crtl->need_drap = true;
1581 dest = virtual_outgoing_args_rtx;
1582 #ifndef STACK_GROWS_DOWNWARD
1583 if (CONST_INT_P (argsize))
1584 dest = plus_constant (dest, -INTVAL (argsize));
1585 else
1586 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1587 #endif
1588 dest = gen_rtx_MEM (BLKmode, dest);
1589 set_mem_align (dest, PARM_BOUNDARY);
1590 src = gen_rtx_MEM (BLKmode, incoming_args);
1591 set_mem_align (src, PARM_BOUNDARY);
1592 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1594 /* Refer to the argument block. */
1595 apply_args_size ();
1596 arguments = gen_rtx_MEM (BLKmode, arguments);
1597 set_mem_align (arguments, PARM_BOUNDARY);
1599 /* Walk past the arg-pointer and structure value address. */
1600 size = GET_MODE_SIZE (Pmode);
1601 if (struct_value)
1602 size += GET_MODE_SIZE (Pmode);
1604 /* Restore each of the registers previously saved. Make USE insns
1605 for each of these registers for use in making the call. */
1606 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1607 if ((mode = apply_args_mode[regno]) != VOIDmode)
1609 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1610 if (size % align != 0)
1611 size = CEIL (size, align) * align;
1612 reg = gen_rtx_REG (mode, regno);
1613 emit_move_insn (reg, adjust_address (arguments, mode, size));
1614 use_reg (&call_fusage, reg);
1615 size += GET_MODE_SIZE (mode);
1618 /* Restore the structure value address unless this is passed as an
1619 "invisible" first argument. */
1620 size = GET_MODE_SIZE (Pmode);
1621 if (struct_value)
1623 rtx value = gen_reg_rtx (Pmode);
1624 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1625 emit_move_insn (struct_value, value);
1626 if (REG_P (struct_value))
1627 use_reg (&call_fusage, struct_value);
1628 size += GET_MODE_SIZE (Pmode);
1631 /* All arguments and registers used for the call are set up by now! */
1632 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1634 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1635 and we don't want to load it into a register as an optimization,
1636 because prepare_call_address already did it if it should be done. */
1637 if (GET_CODE (function) != SYMBOL_REF)
1638 function = memory_address (FUNCTION_MODE, function);
1640 /* Generate the actual call instruction and save the return value. */
1641 #ifdef HAVE_untyped_call
1642 if (HAVE_untyped_call)
1643 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1644 result, result_vector (1, result)));
1645 else
1646 #endif
1647 #ifdef HAVE_call_value
1648 if (HAVE_call_value)
1650 rtx valreg = 0;
1652 /* Locate the unique return register. It is not possible to
1653 express a call that sets more than one return register using
1654 call_value; use untyped_call for that. In fact, untyped_call
1655 only needs to save the return registers in the given block. */
1656 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1657 if ((mode = apply_result_mode[regno]) != VOIDmode)
1659 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1661 valreg = gen_rtx_REG (mode, regno);
1664 emit_call_insn (GEN_CALL_VALUE (valreg,
1665 gen_rtx_MEM (FUNCTION_MODE, function),
1666 const0_rtx, NULL_RTX, const0_rtx));
1668 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1670 else
1671 #endif
1672 gcc_unreachable ();
1674 /* Find the CALL insn we just emitted, and attach the register usage
1675 information. */
1676 call_insn = last_call_insn ();
1677 add_function_usage_to (call_insn, call_fusage);
1679 /* Restore the stack. */
1680 #ifdef HAVE_save_stack_nonlocal
1681 if (HAVE_save_stack_nonlocal)
1682 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1683 else
1684 #endif
1685 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1687 OK_DEFER_POP;
1689 /* Return the address of the result block. */
1690 result = copy_addr_to_reg (XEXP (result, 0));
1691 return convert_memory_address (ptr_mode, result);
1694 /* Perform an untyped return. */
1696 static void
1697 expand_builtin_return (rtx result)
1699 int size, align, regno;
1700 enum machine_mode mode;
1701 rtx reg;
1702 rtx call_fusage = 0;
1704 result = convert_memory_address (Pmode, result);
1706 apply_result_size ();
1707 result = gen_rtx_MEM (BLKmode, result);
1709 #ifdef HAVE_untyped_return
1710 if (HAVE_untyped_return)
1712 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1713 emit_barrier ();
1714 return;
1716 #endif
1718 /* Restore the return value and note that each value is used. */
1719 size = 0;
1720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1721 if ((mode = apply_result_mode[regno]) != VOIDmode)
1723 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1724 if (size % align != 0)
1725 size = CEIL (size, align) * align;
1726 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1727 emit_move_insn (reg, adjust_address (result, mode, size));
1729 push_to_sequence (call_fusage);
1730 emit_use (reg);
1731 call_fusage = get_insns ();
1732 end_sequence ();
1733 size += GET_MODE_SIZE (mode);
1736 /* Put the USE insns before the return. */
1737 emit_insn (call_fusage);
1739 /* Return whatever values was restored by jumping directly to the end
1740 of the function. */
1741 expand_naked_return ();
1744 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1746 static enum type_class
1747 type_to_class (tree type)
1749 switch (TREE_CODE (type))
1751 case VOID_TYPE: return void_type_class;
1752 case INTEGER_TYPE: return integer_type_class;
1753 case ENUMERAL_TYPE: return enumeral_type_class;
1754 case BOOLEAN_TYPE: return boolean_type_class;
1755 case POINTER_TYPE: return pointer_type_class;
1756 case REFERENCE_TYPE: return reference_type_class;
1757 case OFFSET_TYPE: return offset_type_class;
1758 case REAL_TYPE: return real_type_class;
1759 case COMPLEX_TYPE: return complex_type_class;
1760 case FUNCTION_TYPE: return function_type_class;
1761 case METHOD_TYPE: return method_type_class;
1762 case RECORD_TYPE: return record_type_class;
1763 case UNION_TYPE:
1764 case QUAL_UNION_TYPE: return union_type_class;
1765 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1766 ? string_type_class : array_type_class);
1767 case LANG_TYPE: return lang_type_class;
1768 default: return no_type_class;
1772 /* Expand a call EXP to __builtin_classify_type. */
1774 static rtx
1775 expand_builtin_classify_type (tree exp)
1777 if (call_expr_nargs (exp))
1778 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1779 return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
1796 /* Return mathematic function equivalent to FN but operating directly
1797 on TYPE, if available. If IMPLICIT is true find the function in
1798 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1799 can't do the conversion, return zero. */
1801 static tree
1802 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1804 tree const *const fn_arr
1805 = implicit ? implicit_built_in_decls : built_in_decls;
1806 enum built_in_function fcode, fcodef, fcodel;
1808 switch (fn)
1810 CASE_MATHFN (BUILT_IN_ACOS)
1811 CASE_MATHFN (BUILT_IN_ACOSH)
1812 CASE_MATHFN (BUILT_IN_ASIN)
1813 CASE_MATHFN (BUILT_IN_ASINH)
1814 CASE_MATHFN (BUILT_IN_ATAN)
1815 CASE_MATHFN (BUILT_IN_ATAN2)
1816 CASE_MATHFN (BUILT_IN_ATANH)
1817 CASE_MATHFN (BUILT_IN_CBRT)
1818 CASE_MATHFN (BUILT_IN_CEIL)
1819 CASE_MATHFN (BUILT_IN_CEXPI)
1820 CASE_MATHFN (BUILT_IN_COPYSIGN)
1821 CASE_MATHFN (BUILT_IN_COS)
1822 CASE_MATHFN (BUILT_IN_COSH)
1823 CASE_MATHFN (BUILT_IN_DREM)
1824 CASE_MATHFN (BUILT_IN_ERF)
1825 CASE_MATHFN (BUILT_IN_ERFC)
1826 CASE_MATHFN (BUILT_IN_EXP)
1827 CASE_MATHFN (BUILT_IN_EXP10)
1828 CASE_MATHFN (BUILT_IN_EXP2)
1829 CASE_MATHFN (BUILT_IN_EXPM1)
1830 CASE_MATHFN (BUILT_IN_FABS)
1831 CASE_MATHFN (BUILT_IN_FDIM)
1832 CASE_MATHFN (BUILT_IN_FLOOR)
1833 CASE_MATHFN (BUILT_IN_FMA)
1834 CASE_MATHFN (BUILT_IN_FMAX)
1835 CASE_MATHFN (BUILT_IN_FMIN)
1836 CASE_MATHFN (BUILT_IN_FMOD)
1837 CASE_MATHFN (BUILT_IN_FREXP)
1838 CASE_MATHFN (BUILT_IN_GAMMA)
1839 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1840 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1841 CASE_MATHFN (BUILT_IN_HYPOT)
1842 CASE_MATHFN (BUILT_IN_ILOGB)
1843 CASE_MATHFN (BUILT_IN_INF)
1844 CASE_MATHFN (BUILT_IN_ISINF)
1845 CASE_MATHFN (BUILT_IN_J0)
1846 CASE_MATHFN (BUILT_IN_J1)
1847 CASE_MATHFN (BUILT_IN_JN)
1848 CASE_MATHFN (BUILT_IN_LCEIL)
1849 CASE_MATHFN (BUILT_IN_LDEXP)
1850 CASE_MATHFN (BUILT_IN_LFLOOR)
1851 CASE_MATHFN (BUILT_IN_LGAMMA)
1852 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1853 CASE_MATHFN (BUILT_IN_LLCEIL)
1854 CASE_MATHFN (BUILT_IN_LLFLOOR)
1855 CASE_MATHFN (BUILT_IN_LLRINT)
1856 CASE_MATHFN (BUILT_IN_LLROUND)
1857 CASE_MATHFN (BUILT_IN_LOG)
1858 CASE_MATHFN (BUILT_IN_LOG10)
1859 CASE_MATHFN (BUILT_IN_LOG1P)
1860 CASE_MATHFN (BUILT_IN_LOG2)
1861 CASE_MATHFN (BUILT_IN_LOGB)
1862 CASE_MATHFN (BUILT_IN_LRINT)
1863 CASE_MATHFN (BUILT_IN_LROUND)
1864 CASE_MATHFN (BUILT_IN_MODF)
1865 CASE_MATHFN (BUILT_IN_NAN)
1866 CASE_MATHFN (BUILT_IN_NANS)
1867 CASE_MATHFN (BUILT_IN_NEARBYINT)
1868 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1869 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1870 CASE_MATHFN (BUILT_IN_POW)
1871 CASE_MATHFN (BUILT_IN_POWI)
1872 CASE_MATHFN (BUILT_IN_POW10)
1873 CASE_MATHFN (BUILT_IN_REMAINDER)
1874 CASE_MATHFN (BUILT_IN_REMQUO)
1875 CASE_MATHFN (BUILT_IN_RINT)
1876 CASE_MATHFN (BUILT_IN_ROUND)
1877 CASE_MATHFN (BUILT_IN_SCALB)
1878 CASE_MATHFN (BUILT_IN_SCALBLN)
1879 CASE_MATHFN (BUILT_IN_SCALBN)
1880 CASE_MATHFN (BUILT_IN_SIGNBIT)
1881 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1882 CASE_MATHFN (BUILT_IN_SIN)
1883 CASE_MATHFN (BUILT_IN_SINCOS)
1884 CASE_MATHFN (BUILT_IN_SINH)
1885 CASE_MATHFN (BUILT_IN_SQRT)
1886 CASE_MATHFN (BUILT_IN_TAN)
1887 CASE_MATHFN (BUILT_IN_TANH)
1888 CASE_MATHFN (BUILT_IN_TGAMMA)
1889 CASE_MATHFN (BUILT_IN_TRUNC)
1890 CASE_MATHFN (BUILT_IN_Y0)
1891 CASE_MATHFN (BUILT_IN_Y1)
1892 CASE_MATHFN (BUILT_IN_YN)
1894 default:
1895 return NULL_TREE;
1898 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1899 return fn_arr[fcode];
1900 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1901 return fn_arr[fcodef];
1902 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1903 return fn_arr[fcodel];
1904 else
1905 return NULL_TREE;
1908 /* Like mathfn_built_in_1(), but always use the implicit array. */
1910 tree
1911 mathfn_built_in (tree type, enum built_in_function fn)
1913 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1916 /* If errno must be maintained, expand the RTL to check if the result,
1917 TARGET, of a built-in function call, EXP, is NaN, and if so set
1918 errno to EDOM. */
1920 static void
1921 expand_errno_check (tree exp, rtx target)
1923 rtx lab = gen_label_rtx ();
1925 /* Test the result; if it is NaN, set errno=EDOM because
1926 the argument was not in the domain. */
1927 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1928 NULL_RTX, NULL_RTX, lab,
1929 /* The jump is very likely. */
1930 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1932 #ifdef TARGET_EDOM
1933 /* If this built-in doesn't throw an exception, set errno directly. */
1934 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1936 #ifdef GEN_ERRNO_RTX
1937 rtx errno_rtx = GEN_ERRNO_RTX;
1938 #else
1939 rtx errno_rtx
1940 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1941 #endif
1942 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1943 emit_label (lab);
1944 return;
1946 #endif
1948 /* Make sure the library call isn't expanded as a tail call. */
1949 CALL_EXPR_TAILCALL (exp) = 0;
1951 /* We can't set errno=EDOM directly; let the library call do it.
1952 Pop the arguments right away in case the call gets deleted. */
1953 NO_DEFER_POP;
1954 expand_call (exp, target, 0);
1955 OK_DEFER_POP;
1956 emit_label (lab);
1959 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1960 Return NULL_RTX if a normal call should be emitted rather than expanding
1961 the function in-line. EXP is the expression that is a call to the builtin
1962 function; if convenient, the result should be placed in TARGET.
1963 SUBTARGET may be used as the target for computing one of EXP's operands. */
1965 static rtx
1966 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1968 optab builtin_optab;
1969 rtx op0, insns;
1970 tree fndecl = get_callee_fndecl (exp);
1971 enum machine_mode mode;
1972 bool errno_set = false;
1973 tree arg;
1975 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1976 return NULL_RTX;
1978 arg = CALL_EXPR_ARG (exp, 0);
1980 switch (DECL_FUNCTION_CODE (fndecl))
1982 CASE_FLT_FN (BUILT_IN_SQRT):
1983 errno_set = ! tree_expr_nonnegative_p (arg);
1984 builtin_optab = sqrt_optab;
1985 break;
1986 CASE_FLT_FN (BUILT_IN_EXP):
1987 errno_set = true; builtin_optab = exp_optab; break;
1988 CASE_FLT_FN (BUILT_IN_EXP10):
1989 CASE_FLT_FN (BUILT_IN_POW10):
1990 errno_set = true; builtin_optab = exp10_optab; break;
1991 CASE_FLT_FN (BUILT_IN_EXP2):
1992 errno_set = true; builtin_optab = exp2_optab; break;
1993 CASE_FLT_FN (BUILT_IN_EXPM1):
1994 errno_set = true; builtin_optab = expm1_optab; break;
1995 CASE_FLT_FN (BUILT_IN_LOGB):
1996 errno_set = true; builtin_optab = logb_optab; break;
1997 CASE_FLT_FN (BUILT_IN_LOG):
1998 errno_set = true; builtin_optab = log_optab; break;
1999 CASE_FLT_FN (BUILT_IN_LOG10):
2000 errno_set = true; builtin_optab = log10_optab; break;
2001 CASE_FLT_FN (BUILT_IN_LOG2):
2002 errno_set = true; builtin_optab = log2_optab; break;
2003 CASE_FLT_FN (BUILT_IN_LOG1P):
2004 errno_set = true; builtin_optab = log1p_optab; break;
2005 CASE_FLT_FN (BUILT_IN_ASIN):
2006 builtin_optab = asin_optab; break;
2007 CASE_FLT_FN (BUILT_IN_ACOS):
2008 builtin_optab = acos_optab; break;
2009 CASE_FLT_FN (BUILT_IN_TAN):
2010 builtin_optab = tan_optab; break;
2011 CASE_FLT_FN (BUILT_IN_ATAN):
2012 builtin_optab = atan_optab; break;
2013 CASE_FLT_FN (BUILT_IN_FLOOR):
2014 builtin_optab = floor_optab; break;
2015 CASE_FLT_FN (BUILT_IN_CEIL):
2016 builtin_optab = ceil_optab; break;
2017 CASE_FLT_FN (BUILT_IN_TRUNC):
2018 builtin_optab = btrunc_optab; break;
2019 CASE_FLT_FN (BUILT_IN_ROUND):
2020 builtin_optab = round_optab; break;
2021 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2022 builtin_optab = nearbyint_optab;
2023 if (flag_trapping_math)
2024 break;
2025 /* Else fallthrough and expand as rint. */
2026 CASE_FLT_FN (BUILT_IN_RINT):
2027 builtin_optab = rint_optab; break;
2028 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2029 builtin_optab = significand_optab; break;
2030 default:
2031 gcc_unreachable ();
2034 /* Make a suitable register to place result in. */
2035 mode = TYPE_MODE (TREE_TYPE (exp));
2037 if (! flag_errno_math || ! HONOR_NANS (mode))
2038 errno_set = false;
2040 /* Before working hard, check whether the instruction is available. */
2041 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2042 && (!errno_set || !optimize_insn_for_size_p ()))
2044 target = gen_reg_rtx (mode);
2046 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2047 need to expand the argument again. This way, we will not perform
2048 side-effects more the once. */
2049 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2051 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2053 start_sequence ();
2055 /* Compute into TARGET.
2056 Set TARGET to wherever the result comes back. */
2057 target = expand_unop (mode, builtin_optab, op0, target, 0);
2059 if (target != 0)
2061 if (errno_set)
2062 expand_errno_check (exp, target);
2064 /* Output the entire sequence. */
2065 insns = get_insns ();
2066 end_sequence ();
2067 emit_insn (insns);
2068 return target;
2071 /* If we were unable to expand via the builtin, stop the sequence
2072 (without outputting the insns) and call to the library function
2073 with the stabilized argument list. */
2074 end_sequence ();
2077 return expand_call (exp, target, target == const0_rtx);
2080 /* Expand a call to the builtin binary math functions (pow and atan2).
2081 Return NULL_RTX if a normal call should be emitted rather than expanding the
2082 function in-line. EXP is the expression that is a call to the builtin
2083 function; if convenient, the result should be placed in TARGET.
2084 SUBTARGET may be used as the target for computing one of EXP's
2085 operands. */
2087 static rtx
2088 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2090 optab builtin_optab;
2091 rtx op0, op1, insns;
2092 int op1_type = REAL_TYPE;
2093 tree fndecl = get_callee_fndecl (exp);
2094 tree arg0, arg1;
2095 enum machine_mode mode;
2096 bool errno_set = true;
2098 switch (DECL_FUNCTION_CODE (fndecl))
2100 CASE_FLT_FN (BUILT_IN_SCALBN):
2101 CASE_FLT_FN (BUILT_IN_SCALBLN):
2102 CASE_FLT_FN (BUILT_IN_LDEXP):
2103 op1_type = INTEGER_TYPE;
2104 default:
2105 break;
2108 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2109 return NULL_RTX;
2111 arg0 = CALL_EXPR_ARG (exp, 0);
2112 arg1 = CALL_EXPR_ARG (exp, 1);
2114 switch (DECL_FUNCTION_CODE (fndecl))
2116 CASE_FLT_FN (BUILT_IN_POW):
2117 builtin_optab = pow_optab; break;
2118 CASE_FLT_FN (BUILT_IN_ATAN2):
2119 builtin_optab = atan2_optab; break;
2120 CASE_FLT_FN (BUILT_IN_SCALB):
2121 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2122 return 0;
2123 builtin_optab = scalb_optab; break;
2124 CASE_FLT_FN (BUILT_IN_SCALBN):
2125 CASE_FLT_FN (BUILT_IN_SCALBLN):
2126 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2127 return 0;
2128 /* Fall through... */
2129 CASE_FLT_FN (BUILT_IN_LDEXP):
2130 builtin_optab = ldexp_optab; break;
2131 CASE_FLT_FN (BUILT_IN_FMOD):
2132 builtin_optab = fmod_optab; break;
2133 CASE_FLT_FN (BUILT_IN_REMAINDER):
2134 CASE_FLT_FN (BUILT_IN_DREM):
2135 builtin_optab = remainder_optab; break;
2136 default:
2137 gcc_unreachable ();
2140 /* Make a suitable register to place result in. */
2141 mode = TYPE_MODE (TREE_TYPE (exp));
2143 /* Before working hard, check whether the instruction is available. */
2144 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2145 return NULL_RTX;
2147 target = gen_reg_rtx (mode);
2149 if (! flag_errno_math || ! HONOR_NANS (mode))
2150 errno_set = false;
2152 if (errno_set && optimize_insn_for_size_p ())
2153 return 0;
2155 /* Always stabilize the argument list. */
2156 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2157 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2159 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2160 op1 = expand_normal (arg1);
2162 start_sequence ();
2164 /* Compute into TARGET.
2165 Set TARGET to wherever the result comes back. */
2166 target = expand_binop (mode, builtin_optab, op0, op1,
2167 target, 0, OPTAB_DIRECT);
2169 /* If we were unable to expand via the builtin, stop the sequence
2170 (without outputting the insns) and call to the library function
2171 with the stabilized argument list. */
2172 if (target == 0)
2174 end_sequence ();
2175 return expand_call (exp, target, target == const0_rtx);
2178 if (errno_set)
2179 expand_errno_check (exp, target);
2181 /* Output the entire sequence. */
2182 insns = get_insns ();
2183 end_sequence ();
2184 emit_insn (insns);
2186 return target;
2189 /* Expand a call to the builtin trinary math functions (fma).
2190 Return NULL_RTX if a normal call should be emitted rather than expanding the
2191 function in-line. EXP is the expression that is a call to the builtin
2192 function; if convenient, the result should be placed in TARGET.
2193 SUBTARGET may be used as the target for computing one of EXP's
2194 operands. */
2196 static rtx
2197 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2199 optab builtin_optab;
2200 rtx op0, op1, op2, insns;
2201 tree fndecl = get_callee_fndecl (exp);
2202 tree arg0, arg1, arg2;
2203 enum machine_mode mode;
2205 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2206 return NULL_RTX;
2208 arg0 = CALL_EXPR_ARG (exp, 0);
2209 arg1 = CALL_EXPR_ARG (exp, 1);
2210 arg2 = CALL_EXPR_ARG (exp, 2);
2212 switch (DECL_FUNCTION_CODE (fndecl))
2214 CASE_FLT_FN (BUILT_IN_FMA):
2215 builtin_optab = fma_optab; break;
2216 default:
2217 gcc_unreachable ();
2220 /* Make a suitable register to place result in. */
2221 mode = TYPE_MODE (TREE_TYPE (exp));
2223 /* Before working hard, check whether the instruction is available. */
2224 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2225 return NULL_RTX;
2227 target = gen_reg_rtx (mode);
2229 /* Always stabilize the argument list. */
2230 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2231 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2232 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2234 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2235 op1 = expand_normal (arg1);
2236 op2 = expand_normal (arg2);
2238 start_sequence ();
2240 /* Compute into TARGET.
2241 Set TARGET to wherever the result comes back. */
2242 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2243 target, 0);
2245 /* If we were unable to expand via the builtin, stop the sequence
2246 (without outputting the insns) and call to the library function
2247 with the stabilized argument list. */
2248 if (target == 0)
2250 end_sequence ();
2251 return expand_call (exp, target, target == const0_rtx);
2254 /* Output the entire sequence. */
2255 insns = get_insns ();
2256 end_sequence ();
2257 emit_insn (insns);
2259 return target;
2262 /* Expand a call to the builtin sin and cos math functions.
2263 Return NULL_RTX if a normal call should be emitted rather than expanding the
2264 function in-line. EXP is the expression that is a call to the builtin
2265 function; if convenient, the result should be placed in TARGET.
2266 SUBTARGET may be used as the target for computing one of EXP's
2267 operands. */
2269 static rtx
2270 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2272 optab builtin_optab;
2273 rtx op0, insns;
2274 tree fndecl = get_callee_fndecl (exp);
2275 enum machine_mode mode;
2276 tree arg;
2278 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2281 arg = CALL_EXPR_ARG (exp, 0);
2283 switch (DECL_FUNCTION_CODE (fndecl))
2285 CASE_FLT_FN (BUILT_IN_SIN):
2286 CASE_FLT_FN (BUILT_IN_COS):
2287 builtin_optab = sincos_optab; break;
2288 default:
2289 gcc_unreachable ();
2292 /* Make a suitable register to place result in. */
2293 mode = TYPE_MODE (TREE_TYPE (exp));
2295 /* Check if sincos insn is available, otherwise fallback
2296 to sin or cos insn. */
2297 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2298 switch (DECL_FUNCTION_CODE (fndecl))
2300 CASE_FLT_FN (BUILT_IN_SIN):
2301 builtin_optab = sin_optab; break;
2302 CASE_FLT_FN (BUILT_IN_COS):
2303 builtin_optab = cos_optab; break;
2304 default:
2305 gcc_unreachable ();
2308 /* Before working hard, check whether the instruction is available. */
2309 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2311 target = gen_reg_rtx (mode);
2313 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2314 need to expand the argument again. This way, we will not perform
2315 side-effects more the once. */
2316 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2318 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2320 start_sequence ();
2322 /* Compute into TARGET.
2323 Set TARGET to wherever the result comes back. */
2324 if (builtin_optab == sincos_optab)
2326 int result;
2328 switch (DECL_FUNCTION_CODE (fndecl))
2330 CASE_FLT_FN (BUILT_IN_SIN):
2331 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2332 break;
2333 CASE_FLT_FN (BUILT_IN_COS):
2334 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2335 break;
2336 default:
2337 gcc_unreachable ();
2339 gcc_assert (result);
2341 else
2343 target = expand_unop (mode, builtin_optab, op0, target, 0);
2346 if (target != 0)
2348 /* Output the entire sequence. */
2349 insns = get_insns ();
2350 end_sequence ();
2351 emit_insn (insns);
2352 return target;
2355 /* If we were unable to expand via the builtin, stop the sequence
2356 (without outputting the insns) and call to the library function
2357 with the stabilized argument list. */
2358 end_sequence ();
2361 target = expand_call (exp, target, target == const0_rtx);
2363 return target;
2366 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2367 return an RTL instruction code that implements the functionality.
2368 If that isn't possible or available return CODE_FOR_nothing. */
2370 static enum insn_code
2371 interclass_mathfn_icode (tree arg, tree fndecl)
2373 bool errno_set = false;
2374 optab builtin_optab = 0;
2375 enum machine_mode mode;
2377 switch (DECL_FUNCTION_CODE (fndecl))
2379 CASE_FLT_FN (BUILT_IN_ILOGB):
2380 errno_set = true; builtin_optab = ilogb_optab; break;
2381 CASE_FLT_FN (BUILT_IN_ISINF):
2382 builtin_optab = isinf_optab; break;
2383 case BUILT_IN_ISNORMAL:
2384 case BUILT_IN_ISFINITE:
2385 CASE_FLT_FN (BUILT_IN_FINITE):
2386 case BUILT_IN_FINITED32:
2387 case BUILT_IN_FINITED64:
2388 case BUILT_IN_FINITED128:
2389 case BUILT_IN_ISINFD32:
2390 case BUILT_IN_ISINFD64:
2391 case BUILT_IN_ISINFD128:
2392 /* These builtins have no optabs (yet). */
2393 break;
2394 default:
2395 gcc_unreachable ();
2398 /* There's no easy way to detect the case we need to set EDOM. */
2399 if (flag_errno_math && errno_set)
2400 return CODE_FOR_nothing;
2402 /* Optab mode depends on the mode of the input argument. */
2403 mode = TYPE_MODE (TREE_TYPE (arg));
2405 if (builtin_optab)
2406 return optab_handler (builtin_optab, mode);
2407 return CODE_FOR_nothing;
2410 /* Expand a call to one of the builtin math functions that operate on
2411 floating point argument and output an integer result (ilogb, isinf,
2412 isnan, etc).
2413 Return 0 if a normal call should be emitted rather than expanding the
2414 function in-line. EXP is the expression that is a call to the builtin
2415 function; if convenient, the result should be placed in TARGET. */
2417 static rtx
2418 expand_builtin_interclass_mathfn (tree exp, rtx target)
2420 enum insn_code icode = CODE_FOR_nothing;
2421 rtx op0;
2422 tree fndecl = get_callee_fndecl (exp);
2423 enum machine_mode mode;
2424 tree arg;
2426 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2427 return NULL_RTX;
2429 arg = CALL_EXPR_ARG (exp, 0);
2430 icode = interclass_mathfn_icode (arg, fndecl);
2431 mode = TYPE_MODE (TREE_TYPE (arg));
2433 if (icode != CODE_FOR_nothing)
2435 rtx last = get_last_insn ();
2436 tree orig_arg = arg;
2437 /* Make a suitable register to place result in. */
2438 if (!target
2439 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp))
2440 || !insn_data[icode].operand[0].predicate (target, GET_MODE (target)))
2441 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2443 gcc_assert (insn_data[icode].operand[0].predicate
2444 (target, GET_MODE (target)));
2446 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2447 need to expand the argument again. This way, we will not perform
2448 side-effects more the once. */
2449 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2451 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2453 if (mode != GET_MODE (op0))
2454 op0 = convert_to_mode (mode, op0, 0);
2456 /* Compute into TARGET.
2457 Set TARGET to wherever the result comes back. */
2458 if (maybe_emit_unop_insn (icode, target, op0, UNKNOWN))
2459 return target;
2460 delete_insns_since (last);
2461 CALL_EXPR_ARG (exp, 0) = orig_arg;
2464 return NULL_RTX;
2467 /* Expand a call to the builtin sincos math function.
2468 Return NULL_RTX if a normal call should be emitted rather than expanding the
2469 function in-line. EXP is the expression that is a call to the builtin
2470 function. */
2472 static rtx
2473 expand_builtin_sincos (tree exp)
2475 rtx op0, op1, op2, target1, target2;
2476 enum machine_mode mode;
2477 tree arg, sinp, cosp;
2478 int result;
2479 location_t loc = EXPR_LOCATION (exp);
2480 tree alias_type, alias_off;
2482 if (!validate_arglist (exp, REAL_TYPE,
2483 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2484 return NULL_RTX;
2486 arg = CALL_EXPR_ARG (exp, 0);
2487 sinp = CALL_EXPR_ARG (exp, 1);
2488 cosp = CALL_EXPR_ARG (exp, 2);
2490 /* Make a suitable register to place result in. */
2491 mode = TYPE_MODE (TREE_TYPE (arg));
2493 /* Check if sincos insn is available, otherwise emit the call. */
2494 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2495 return NULL_RTX;
2497 target1 = gen_reg_rtx (mode);
2498 target2 = gen_reg_rtx (mode);
2500 op0 = expand_normal (arg);
2501 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2502 alias_off = build_int_cst (alias_type, 0);
2503 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2504 sinp, alias_off));
2505 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2506 cosp, alias_off));
2508 /* Compute into target1 and target2.
2509 Set TARGET to wherever the result comes back. */
2510 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2511 gcc_assert (result);
2513 /* Move target1 and target2 to the memory locations indicated
2514 by op1 and op2. */
2515 emit_move_insn (op1, target1);
2516 emit_move_insn (op2, target2);
2518 return const0_rtx;
2521 /* Expand a call to the internal cexpi builtin to the sincos math function.
2522 EXP is the expression that is a call to the builtin function; if convenient,
2523 the result should be placed in TARGET. */
2525 static rtx
2526 expand_builtin_cexpi (tree exp, rtx target)
2528 tree fndecl = get_callee_fndecl (exp);
2529 tree arg, type;
2530 enum machine_mode mode;
2531 rtx op0, op1, op2;
2532 location_t loc = EXPR_LOCATION (exp);
2534 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2535 return NULL_RTX;
2537 arg = CALL_EXPR_ARG (exp, 0);
2538 type = TREE_TYPE (arg);
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2541 /* Try expanding via a sincos optab, fall back to emitting a libcall
2542 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2543 is only generated from sincos, cexp or if we have either of them. */
2544 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2546 op1 = gen_reg_rtx (mode);
2547 op2 = gen_reg_rtx (mode);
2549 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2551 /* Compute into op1 and op2. */
2552 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2554 else if (TARGET_HAS_SINCOS)
2556 tree call, fn = NULL_TREE;
2557 tree top1, top2;
2558 rtx op1a, op2a;
2560 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2561 fn = built_in_decls[BUILT_IN_SINCOSF];
2562 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2563 fn = built_in_decls[BUILT_IN_SINCOS];
2564 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2565 fn = built_in_decls[BUILT_IN_SINCOSL];
2566 else
2567 gcc_unreachable ();
2569 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2570 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2571 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2572 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2573 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2574 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2576 /* Make sure not to fold the sincos call again. */
2577 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2578 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2579 call, 3, arg, top1, top2));
2581 else
2583 tree call, fn = NULL_TREE, narg;
2584 tree ctype = build_complex_type (type);
2586 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 fn = built_in_decls[BUILT_IN_CEXPF];
2588 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 fn = built_in_decls[BUILT_IN_CEXP];
2590 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 fn = built_in_decls[BUILT_IN_CEXPL];
2592 else
2593 gcc_unreachable ();
2595 /* If we don't have a decl for cexp create one. This is the
2596 friendliest fallback if the user calls __builtin_cexpi
2597 without full target C99 function support. */
2598 if (fn == NULL_TREE)
2600 tree fntype;
2601 const char *name = NULL;
2603 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2604 name = "cexpf";
2605 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2606 name = "cexp";
2607 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2608 name = "cexpl";
2610 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2611 fn = build_fn_decl (name, fntype);
2614 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2615 build_real (type, dconst0), arg);
2617 /* Make sure not to fold the cexp call again. */
2618 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2619 return expand_expr (build_call_nary (ctype, call, 1, narg),
2620 target, VOIDmode, EXPAND_NORMAL);
2623 /* Now build the proper return type. */
2624 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2625 make_tree (TREE_TYPE (arg), op2),
2626 make_tree (TREE_TYPE (arg), op1)),
2627 target, VOIDmode, EXPAND_NORMAL);
2630 /* Conveniently construct a function call expression. FNDECL names the
2631 function to be called, N is the number of arguments, and the "..."
2632 parameters are the argument expressions. Unlike build_call_exr
2633 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2635 static tree
2636 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2638 va_list ap;
2639 tree fntype = TREE_TYPE (fndecl);
2640 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2642 va_start (ap, n);
2643 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2644 va_end (ap);
2645 SET_EXPR_LOCATION (fn, loc);
2646 return fn;
2649 /* Expand a call to one of the builtin rounding functions gcc defines
2650 as an extension (lfloor and lceil). As these are gcc extensions we
2651 do not need to worry about setting errno to EDOM.
2652 If expanding via optab fails, lower expression to (int)(floor(x)).
2653 EXP is the expression that is a call to the builtin function;
2654 if convenient, the result should be placed in TARGET. */
2656 static rtx
2657 expand_builtin_int_roundingfn (tree exp, rtx target)
2659 convert_optab builtin_optab;
2660 rtx op0, insns, tmp;
2661 tree fndecl = get_callee_fndecl (exp);
2662 enum built_in_function fallback_fn;
2663 tree fallback_fndecl;
2664 enum machine_mode mode;
2665 tree arg;
2667 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2668 gcc_unreachable ();
2670 arg = CALL_EXPR_ARG (exp, 0);
2672 switch (DECL_FUNCTION_CODE (fndecl))
2674 CASE_FLT_FN (BUILT_IN_LCEIL):
2675 CASE_FLT_FN (BUILT_IN_LLCEIL):
2676 builtin_optab = lceil_optab;
2677 fallback_fn = BUILT_IN_CEIL;
2678 break;
2680 CASE_FLT_FN (BUILT_IN_LFLOOR):
2681 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2682 builtin_optab = lfloor_optab;
2683 fallback_fn = BUILT_IN_FLOOR;
2684 break;
2686 default:
2687 gcc_unreachable ();
2690 /* Make a suitable register to place result in. */
2691 mode = TYPE_MODE (TREE_TYPE (exp));
2693 target = gen_reg_rtx (mode);
2695 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2696 need to expand the argument again. This way, we will not perform
2697 side-effects more the once. */
2698 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2700 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2702 start_sequence ();
2704 /* Compute into TARGET. */
2705 if (expand_sfix_optab (target, op0, builtin_optab))
2707 /* Output the entire sequence. */
2708 insns = get_insns ();
2709 end_sequence ();
2710 emit_insn (insns);
2711 return target;
2714 /* If we were unable to expand via the builtin, stop the sequence
2715 (without outputting the insns). */
2716 end_sequence ();
2718 /* Fall back to floating point rounding optab. */
2719 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2721 /* For non-C99 targets we may end up without a fallback fndecl here
2722 if the user called __builtin_lfloor directly. In this case emit
2723 a call to the floor/ceil variants nevertheless. This should result
2724 in the best user experience for not full C99 targets. */
2725 if (fallback_fndecl == NULL_TREE)
2727 tree fntype;
2728 const char *name = NULL;
2730 switch (DECL_FUNCTION_CODE (fndecl))
2732 case BUILT_IN_LCEIL:
2733 case BUILT_IN_LLCEIL:
2734 name = "ceil";
2735 break;
2736 case BUILT_IN_LCEILF:
2737 case BUILT_IN_LLCEILF:
2738 name = "ceilf";
2739 break;
2740 case BUILT_IN_LCEILL:
2741 case BUILT_IN_LLCEILL:
2742 name = "ceill";
2743 break;
2744 case BUILT_IN_LFLOOR:
2745 case BUILT_IN_LLFLOOR:
2746 name = "floor";
2747 break;
2748 case BUILT_IN_LFLOORF:
2749 case BUILT_IN_LLFLOORF:
2750 name = "floorf";
2751 break;
2752 case BUILT_IN_LFLOORL:
2753 case BUILT_IN_LLFLOORL:
2754 name = "floorl";
2755 break;
2756 default:
2757 gcc_unreachable ();
2760 fntype = build_function_type_list (TREE_TYPE (arg),
2761 TREE_TYPE (arg), NULL_TREE);
2762 fallback_fndecl = build_fn_decl (name, fntype);
2765 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2767 tmp = expand_normal (exp);
2769 /* Truncate the result of floating point optab to integer
2770 via expand_fix (). */
2771 target = gen_reg_rtx (mode);
2772 expand_fix (target, tmp, 0);
2774 return target;
2777 /* Expand a call to one of the builtin math functions doing integer
2778 conversion (lrint).
2779 Return 0 if a normal call should be emitted rather than expanding the
2780 function in-line. EXP is the expression that is a call to the builtin
2781 function; if convenient, the result should be placed in TARGET. */
2783 static rtx
2784 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2786 convert_optab builtin_optab;
2787 rtx op0, insns;
2788 tree fndecl = get_callee_fndecl (exp);
2789 tree arg;
2790 enum machine_mode mode;
2792 /* There's no easy way to detect the case we need to set EDOM. */
2793 if (flag_errno_math)
2794 return NULL_RTX;
2796 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2797 gcc_unreachable ();
2799 arg = CALL_EXPR_ARG (exp, 0);
2801 switch (DECL_FUNCTION_CODE (fndecl))
2803 CASE_FLT_FN (BUILT_IN_LRINT):
2804 CASE_FLT_FN (BUILT_IN_LLRINT):
2805 builtin_optab = lrint_optab; break;
2806 CASE_FLT_FN (BUILT_IN_LROUND):
2807 CASE_FLT_FN (BUILT_IN_LLROUND):
2808 builtin_optab = lround_optab; break;
2809 default:
2810 gcc_unreachable ();
2813 /* Make a suitable register to place result in. */
2814 mode = TYPE_MODE (TREE_TYPE (exp));
2816 target = gen_reg_rtx (mode);
2818 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2819 need to expand the argument again. This way, we will not perform
2820 side-effects more the once. */
2821 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2823 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2825 start_sequence ();
2827 if (expand_sfix_optab (target, op0, builtin_optab))
2829 /* Output the entire sequence. */
2830 insns = get_insns ();
2831 end_sequence ();
2832 emit_insn (insns);
2833 return target;
2836 /* If we were unable to expand via the builtin, stop the sequence
2837 (without outputting the insns) and call to the library function
2838 with the stabilized argument list. */
2839 end_sequence ();
2841 target = expand_call (exp, target, target == const0_rtx);
2843 return target;
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2919 /* Return the number of multiplications required to calculate
2920 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2921 subroutine of powi_cost. CACHE is an array indicating
2922 which exponents have already been calculated. */
2924 static int
2925 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2927 /* If we've already calculated this exponent, then this evaluation
2928 doesn't require any additional multiplications. */
2929 if (cache[n])
2930 return 0;
2932 cache[n] = true;
2933 return powi_lookup_cost (n - powi_table[n], cache)
2934 + powi_lookup_cost (powi_table[n], cache) + 1;
2937 /* Return the number of multiplications required to calculate
2938 powi(x,n) for an arbitrary x, given the exponent N. This
2939 function needs to be kept in sync with expand_powi below. */
2941 static int
2942 powi_cost (HOST_WIDE_INT n)
2944 bool cache[POWI_TABLE_SIZE];
2945 unsigned HOST_WIDE_INT digit;
2946 unsigned HOST_WIDE_INT val;
2947 int result;
2949 if (n == 0)
2950 return 0;
2952 /* Ignore the reciprocal when calculating the cost. */
2953 val = (n < 0) ? -n : n;
2955 /* Initialize the exponent cache. */
2956 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2957 cache[1] = true;
2959 result = 0;
2961 while (val >= POWI_TABLE_SIZE)
2963 if (val & 1)
2965 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2966 result += powi_lookup_cost (digit, cache)
2967 + POWI_WINDOW_SIZE + 1;
2968 val >>= POWI_WINDOW_SIZE;
2970 else
2972 val >>= 1;
2973 result++;
2977 return result + powi_lookup_cost (val, cache);
2980 /* Recursive subroutine of expand_powi. This function takes the array,
2981 CACHE, of already calculated exponents and an exponent N and returns
2982 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2984 static rtx
2985 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2987 unsigned HOST_WIDE_INT digit;
2988 rtx target, result;
2989 rtx op0, op1;
2991 if (n < POWI_TABLE_SIZE)
2993 if (cache[n])
2994 return cache[n];
2996 target = gen_reg_rtx (mode);
2997 cache[n] = target;
2999 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
3000 op1 = expand_powi_1 (mode, powi_table[n], cache);
3002 else if (n & 1)
3004 target = gen_reg_rtx (mode);
3005 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
3006 op0 = expand_powi_1 (mode, n - digit, cache);
3007 op1 = expand_powi_1 (mode, digit, cache);
3009 else
3011 target = gen_reg_rtx (mode);
3012 op0 = expand_powi_1 (mode, n >> 1, cache);
3013 op1 = op0;
3016 result = expand_mult (mode, op0, op1, target, 0);
3017 if (result != target)
3018 emit_move_insn (target, result);
3019 return target;
3022 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
3023 floating point operand in mode MODE, and N is the exponent. This
3024 function needs to be kept in sync with powi_cost above. */
3026 static rtx
3027 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
3029 rtx cache[POWI_TABLE_SIZE];
3030 rtx result;
3032 if (n == 0)
3033 return CONST1_RTX (mode);
3035 memset (cache, 0, sizeof (cache));
3036 cache[1] = x;
3038 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
3040 /* If the original exponent was negative, reciprocate the result. */
3041 if (n < 0)
3042 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3043 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3045 return result;
3048 /* Fold a builtin function call to pow, powf, or powl into a series of sqrts or
3049 cbrts. Return NULL_RTX if no simplification can be made or expand the tree
3050 if we can simplify it. */
3051 static rtx
3052 expand_builtin_pow_root (location_t loc, tree arg0, tree arg1, tree type,
3053 rtx subtarget)
3055 if (TREE_CODE (arg1) == REAL_CST
3056 && !TREE_OVERFLOW (arg1)
3057 && flag_unsafe_math_optimizations)
3059 enum machine_mode mode = TYPE_MODE (type);
3060 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
3061 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
3062 REAL_VALUE_TYPE c = TREE_REAL_CST (arg1);
3063 tree op = NULL_TREE;
3065 if (sqrtfn)
3067 /* Optimize pow (x, 0.5) into sqrt. */
3068 if (REAL_VALUES_EQUAL (c, dconsthalf))
3069 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3071 /* Don't do this optimization if we don't have a sqrt insn. */
3072 else if (optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3074 REAL_VALUE_TYPE dconst1_4 = dconst1;
3075 REAL_VALUE_TYPE dconst3_4;
3076 SET_REAL_EXP (&dconst1_4, REAL_EXP (&dconst1_4) - 2);
3078 real_from_integer (&dconst3_4, VOIDmode, 3, 0, 0);
3079 SET_REAL_EXP (&dconst3_4, REAL_EXP (&dconst3_4) - 2);
3081 /* Optimize pow (x, 0.25) into sqrt (sqrt (x)). Assume on most
3082 machines that a builtin sqrt instruction is smaller than a
3083 call to pow with 0.25, so do this optimization even if
3084 -Os. */
3085 if (REAL_VALUES_EQUAL (c, dconst1_4))
3087 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3088 op = build_call_nofold_loc (loc, sqrtfn, 1, op);
3091 /* Optimize pow (x, 0.75) = sqrt (x) * sqrt (sqrt (x)) unless we
3092 are optimizing for space. */
3093 else if (optimize_insn_for_speed_p ()
3094 && !TREE_SIDE_EFFECTS (arg0)
3095 && REAL_VALUES_EQUAL (c, dconst3_4))
3097 tree sqrt1 = build_call_expr_loc (loc, sqrtfn, 1, arg0);
3098 tree sqrt2 = builtin_save_expr (sqrt1);
3099 tree sqrt3 = build_call_expr_loc (loc, sqrtfn, 1, sqrt1);
3100 op = fold_build2_loc (loc, MULT_EXPR, type, sqrt2, sqrt3);
3105 /* Check whether we can do cbrt insstead of pow (x, 1./3.) and
3106 cbrt/sqrts instead of pow (x, 1./6.). */
3107 if (cbrtfn && ! op
3108 && (tree_expr_nonnegative_p (arg0) || !HONOR_NANS (mode)))
3110 /* First try 1/3. */
3111 REAL_VALUE_TYPE dconst1_3
3112 = real_value_truncate (mode, dconst_third ());
3114 if (REAL_VALUES_EQUAL (c, dconst1_3))
3115 op = build_call_nofold_loc (loc, cbrtfn, 1, arg0);
3117 /* Now try 1/6. */
3118 else if (optimize_insn_for_speed_p ()
3119 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)
3121 REAL_VALUE_TYPE dconst1_6 = dconst1_3;
3122 SET_REAL_EXP (&dconst1_6, REAL_EXP (&dconst1_6) - 1);
3124 if (REAL_VALUES_EQUAL (c, dconst1_6))
3126 op = build_call_nofold_loc (loc, sqrtfn, 1, arg0);
3127 op = build_call_nofold_loc (loc, cbrtfn, 1, op);
3132 if (op)
3133 return expand_expr (op, subtarget, mode, EXPAND_NORMAL);
3136 return NULL_RTX;
3139 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
3140 a normal call should be emitted rather than expanding the function
3141 in-line. EXP is the expression that is a call to the builtin
3142 function; if convenient, the result should be placed in TARGET. */
3144 static rtx
3145 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
3147 tree arg0, arg1;
3148 tree fn, narg0;
3149 tree type = TREE_TYPE (exp);
3150 REAL_VALUE_TYPE cint, c, c2;
3151 HOST_WIDE_INT n;
3152 rtx op, op2;
3153 enum machine_mode mode = TYPE_MODE (type);
3155 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
3156 return NULL_RTX;
3158 arg0 = CALL_EXPR_ARG (exp, 0);
3159 arg1 = CALL_EXPR_ARG (exp, 1);
3161 if (TREE_CODE (arg1) != REAL_CST
3162 || TREE_OVERFLOW (arg1))
3163 return expand_builtin_mathfn_2 (exp, target, subtarget);
3165 /* Handle constant exponents. */
3167 /* For integer valued exponents we can expand to an optimal multiplication
3168 sequence using expand_powi. */
3169 c = TREE_REAL_CST (arg1);
3170 n = real_to_integer (&c);
3171 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3172 if (real_identical (&c, &cint)
3173 && ((n >= -1 && n <= 2)
3174 || (flag_unsafe_math_optimizations
3175 && optimize_insn_for_speed_p ()
3176 && powi_cost (n) <= POWI_MAX_MULTS)))
3178 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3179 if (n != 1)
3181 op = force_reg (mode, op);
3182 op = expand_powi (op, mode, n);
3184 return op;
3187 narg0 = builtin_save_expr (arg0);
3189 /* If the exponent is not integer valued, check if it is half of an integer.
3190 In this case we can expand to sqrt (x) * x**(n/2). */
3191 fn = mathfn_built_in (type, BUILT_IN_SQRT);
3192 if (fn != NULL_TREE)
3194 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
3195 n = real_to_integer (&c2);
3196 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3197 if (real_identical (&c2, &cint)
3198 && ((flag_unsafe_math_optimizations
3199 && optimize_insn_for_speed_p ()
3200 && powi_cost (n/2) <= POWI_MAX_MULTS)
3201 /* Even the c == 0.5 case cannot be done unconditionally
3202 when we need to preserve signed zeros, as
3203 pow (-0, 0.5) is +0, while sqrt(-0) is -0. */
3204 || (!HONOR_SIGNED_ZEROS (mode) && n == 1)
3205 /* For c == 1.5 we can assume that x * sqrt (x) is always
3206 smaller than pow (x, 1.5) if sqrt will not be expanded
3207 as a call. */
3208 || (n == 3
3209 && optab_handler (sqrt_optab, mode) != CODE_FOR_nothing)))
3211 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3212 narg0);
3213 /* Use expand_expr in case the newly built call expression
3214 was folded to a non-call. */
3215 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
3216 if (n != 1)
3218 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3219 op2 = force_reg (mode, op2);
3220 op2 = expand_powi (op2, mode, abs (n / 2));
3221 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3222 0, OPTAB_LIB_WIDEN);
3223 /* If the original exponent was negative, reciprocate the
3224 result. */
3225 if (n < 0)
3226 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3227 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3229 return op;
3233 /* Check whether we can do a series of sqrt or cbrt's instead of the pow
3234 call. */
3235 op = expand_builtin_pow_root (EXPR_LOCATION (exp), arg0, arg1, type,
3236 subtarget);
3237 if (op)
3238 return op;
3240 /* Try if the exponent is a third of an integer. In this case
3241 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
3242 different from pow (x, 1./3.) due to rounding and behavior
3243 with negative x we need to constrain this transformation to
3244 unsafe math and positive x or finite math. */
3245 fn = mathfn_built_in (type, BUILT_IN_CBRT);
3246 if (fn != NULL_TREE
3247 && flag_unsafe_math_optimizations
3248 && (tree_expr_nonnegative_p (arg0)
3249 || !HONOR_NANS (mode)))
3251 REAL_VALUE_TYPE dconst3;
3252 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
3253 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
3254 real_round (&c2, mode, &c2);
3255 n = real_to_integer (&c2);
3256 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3257 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3258 real_convert (&c2, mode, &c2);
3259 if (real_identical (&c2, &c)
3260 && ((optimize_insn_for_speed_p ()
3261 && powi_cost (n/3) <= POWI_MAX_MULTS)
3262 || n == 1))
3264 tree call_expr = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 1,
3265 narg0);
3266 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3267 if (abs (n) % 3 == 2)
3268 op = expand_simple_binop (mode, MULT, op, op, op,
3269 0, OPTAB_LIB_WIDEN);
3270 if (n != 1)
3272 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3273 op2 = force_reg (mode, op2);
3274 op2 = expand_powi (op2, mode, abs (n / 3));
3275 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3276 0, OPTAB_LIB_WIDEN);
3277 /* If the original exponent was negative, reciprocate the
3278 result. */
3279 if (n < 0)
3280 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3281 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3283 return op;
3287 /* Fall back to optab expansion. */
3288 return expand_builtin_mathfn_2 (exp, target, subtarget);
3291 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3292 a normal call should be emitted rather than expanding the function
3293 in-line. EXP is the expression that is a call to the builtin
3294 function; if convenient, the result should be placed in TARGET. */
3296 static rtx
3297 expand_builtin_powi (tree exp, rtx target)
3299 tree arg0, arg1;
3300 rtx op0, op1;
3301 enum machine_mode mode;
3302 enum machine_mode mode2;
3304 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3305 return NULL_RTX;
3307 arg0 = CALL_EXPR_ARG (exp, 0);
3308 arg1 = CALL_EXPR_ARG (exp, 1);
3309 mode = TYPE_MODE (TREE_TYPE (exp));
3311 /* Handle constant power. */
3313 if (TREE_CODE (arg1) == INTEGER_CST
3314 && !TREE_OVERFLOW (arg1))
3316 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3318 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3319 Otherwise, check the number of multiplications required. */
3320 if ((TREE_INT_CST_HIGH (arg1) == 0
3321 || TREE_INT_CST_HIGH (arg1) == -1)
3322 && ((n >= -1 && n <= 2)
3323 || (optimize_insn_for_speed_p ()
3324 && powi_cost (n) <= POWI_MAX_MULTS)))
3326 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3327 op0 = force_reg (mode, op0);
3328 return expand_powi (op0, mode, n);
3332 /* Emit a libcall to libgcc. */
3334 /* Mode of the 2nd argument must match that of an int. */
3335 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3337 if (target == NULL_RTX)
3338 target = gen_reg_rtx (mode);
3340 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3341 if (GET_MODE (op0) != mode)
3342 op0 = convert_to_mode (mode, op0, 0);
3343 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3344 if (GET_MODE (op1) != mode2)
3345 op1 = convert_to_mode (mode2, op1, 0);
3347 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3348 target, LCT_CONST, mode, 2,
3349 op0, mode, op1, mode2);
3351 return target;
3354 /* Expand expression EXP which is a call to the strlen builtin. Return
3355 NULL_RTX if we failed the caller should emit a normal call, otherwise
3356 try to get the result in TARGET, if convenient. */
3358 static rtx
3359 expand_builtin_strlen (tree exp, rtx target,
3360 enum machine_mode target_mode)
3362 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3363 return NULL_RTX;
3364 else
3366 rtx pat;
3367 tree len;
3368 tree src = CALL_EXPR_ARG (exp, 0);
3369 rtx result, src_reg, char_rtx, before_strlen;
3370 enum machine_mode insn_mode = target_mode, char_mode;
3371 enum insn_code icode = CODE_FOR_nothing;
3372 unsigned int align;
3374 /* If the length can be computed at compile-time, return it. */
3375 len = c_strlen (src, 0);
3376 if (len)
3377 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3379 /* If the length can be computed at compile-time and is constant
3380 integer, but there are side-effects in src, evaluate
3381 src for side-effects, then return len.
3382 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3383 can be optimized into: i++; x = 3; */
3384 len = c_strlen (src, 1);
3385 if (len && TREE_CODE (len) == INTEGER_CST)
3387 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3388 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3391 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3393 /* If SRC is not a pointer type, don't do this operation inline. */
3394 if (align == 0)
3395 return NULL_RTX;
3397 /* Bail out if we can't compute strlen in the right mode. */
3398 while (insn_mode != VOIDmode)
3400 icode = optab_handler (strlen_optab, insn_mode);
3401 if (icode != CODE_FOR_nothing)
3402 break;
3404 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3406 if (insn_mode == VOIDmode)
3407 return NULL_RTX;
3409 /* Make a place to write the result of the instruction. */
3410 result = target;
3411 if (! (result != 0
3412 && REG_P (result)
3413 && GET_MODE (result) == insn_mode
3414 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3415 result = gen_reg_rtx (insn_mode);
3417 /* Make a place to hold the source address. We will not expand
3418 the actual source until we are sure that the expansion will
3419 not fail -- there are trees that cannot be expanded twice. */
3420 src_reg = gen_reg_rtx (Pmode);
3422 /* Mark the beginning of the strlen sequence so we can emit the
3423 source operand later. */
3424 before_strlen = get_last_insn ();
3426 char_rtx = const0_rtx;
3427 char_mode = insn_data[(int) icode].operand[2].mode;
3428 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3429 char_mode))
3430 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3432 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3433 char_rtx, GEN_INT (align));
3434 if (! pat)
3435 return NULL_RTX;
3436 emit_insn (pat);
3438 /* Now that we are assured of success, expand the source. */
3439 start_sequence ();
3440 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3441 if (pat != src_reg)
3442 emit_move_insn (src_reg, pat);
3443 pat = get_insns ();
3444 end_sequence ();
3446 if (before_strlen)
3447 emit_insn_after (pat, before_strlen);
3448 else
3449 emit_insn_before (pat, get_insns ());
3451 /* Return the value in the proper mode for this function. */
3452 if (GET_MODE (result) == target_mode)
3453 target = result;
3454 else if (target != 0)
3455 convert_move (target, result, 0);
3456 else
3457 target = convert_to_mode (target_mode, result, 0);
3459 return target;
3463 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3464 bytes from constant string DATA + OFFSET and return it as target
3465 constant. */
3467 static rtx
3468 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3469 enum machine_mode mode)
3471 const char *str = (const char *) data;
3473 gcc_assert (offset >= 0
3474 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3475 <= strlen (str) + 1));
3477 return c_readstr (str + offset, mode);
3480 /* Expand a call EXP to the memcpy builtin.
3481 Return NULL_RTX if we failed, the caller should emit a normal call,
3482 otherwise try to get the result in TARGET, if convenient (and in
3483 mode MODE if that's convenient). */
3485 static rtx
3486 expand_builtin_memcpy (tree exp, rtx target)
3488 if (!validate_arglist (exp,
3489 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3490 return NULL_RTX;
3491 else
3493 tree dest = CALL_EXPR_ARG (exp, 0);
3494 tree src = CALL_EXPR_ARG (exp, 1);
3495 tree len = CALL_EXPR_ARG (exp, 2);
3496 const char *src_str;
3497 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3498 unsigned int dest_align
3499 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3500 rtx dest_mem, src_mem, dest_addr, len_rtx;
3501 HOST_WIDE_INT expected_size = -1;
3502 unsigned int expected_align = 0;
3504 /* If DEST is not a pointer type, call the normal function. */
3505 if (dest_align == 0)
3506 return NULL_RTX;
3508 /* If either SRC is not a pointer type, don't do this
3509 operation in-line. */
3510 if (src_align == 0)
3511 return NULL_RTX;
3513 if (currently_expanding_gimple_stmt)
3514 stringop_block_profile (currently_expanding_gimple_stmt,
3515 &expected_align, &expected_size);
3517 if (expected_align < dest_align)
3518 expected_align = dest_align;
3519 dest_mem = get_memory_rtx (dest, len);
3520 set_mem_align (dest_mem, dest_align);
3521 len_rtx = expand_normal (len);
3522 src_str = c_getstr (src);
3524 /* If SRC is a string constant and block move would be done
3525 by pieces, we can avoid loading the string from memory
3526 and only stored the computed constants. */
3527 if (src_str
3528 && CONST_INT_P (len_rtx)
3529 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3530 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3531 CONST_CAST (char *, src_str),
3532 dest_align, false))
3534 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3535 builtin_memcpy_read_str,
3536 CONST_CAST (char *, src_str),
3537 dest_align, false, 0);
3538 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3539 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3540 return dest_mem;
3543 src_mem = get_memory_rtx (src, len);
3544 set_mem_align (src_mem, src_align);
3546 /* Copy word part most expediently. */
3547 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3548 CALL_EXPR_TAILCALL (exp)
3549 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3550 expected_align, expected_size);
3552 if (dest_addr == 0)
3554 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3555 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3557 return dest_addr;
3561 /* Expand a call EXP to the mempcpy builtin.
3562 Return NULL_RTX if we failed; the caller should emit a normal call,
3563 otherwise try to get the result in TARGET, if convenient (and in
3564 mode MODE if that's convenient). If ENDP is 0 return the
3565 destination pointer, if ENDP is 1 return the end pointer ala
3566 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3567 stpcpy. */
3569 static rtx
3570 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3572 if (!validate_arglist (exp,
3573 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3574 return NULL_RTX;
3575 else
3577 tree dest = CALL_EXPR_ARG (exp, 0);
3578 tree src = CALL_EXPR_ARG (exp, 1);
3579 tree len = CALL_EXPR_ARG (exp, 2);
3580 return expand_builtin_mempcpy_args (dest, src, len,
3581 target, mode, /*endp=*/ 1);
3585 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3586 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3587 so that this can also be called without constructing an actual CALL_EXPR.
3588 The other arguments and return value are the same as for
3589 expand_builtin_mempcpy. */
3591 static rtx
3592 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3593 rtx target, enum machine_mode mode, int endp)
3595 /* If return value is ignored, transform mempcpy into memcpy. */
3596 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3598 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3599 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3600 dest, src, len);
3601 return expand_expr (result, target, mode, EXPAND_NORMAL);
3603 else
3605 const char *src_str;
3606 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3607 unsigned int dest_align
3608 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3609 rtx dest_mem, src_mem, len_rtx;
3611 /* If either SRC or DEST is not a pointer type, don't do this
3612 operation in-line. */
3613 if (dest_align == 0 || src_align == 0)
3614 return NULL_RTX;
3616 /* If LEN is not constant, call the normal function. */
3617 if (! host_integerp (len, 1))
3618 return NULL_RTX;
3620 len_rtx = expand_normal (len);
3621 src_str = c_getstr (src);
3623 /* If SRC is a string constant and block move would be done
3624 by pieces, we can avoid loading the string from memory
3625 and only stored the computed constants. */
3626 if (src_str
3627 && CONST_INT_P (len_rtx)
3628 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3629 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3630 CONST_CAST (char *, src_str),
3631 dest_align, false))
3633 dest_mem = get_memory_rtx (dest, len);
3634 set_mem_align (dest_mem, dest_align);
3635 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3636 builtin_memcpy_read_str,
3637 CONST_CAST (char *, src_str),
3638 dest_align, false, endp);
3639 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3640 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3641 return dest_mem;
3644 if (CONST_INT_P (len_rtx)
3645 && can_move_by_pieces (INTVAL (len_rtx),
3646 MIN (dest_align, src_align)))
3648 dest_mem = get_memory_rtx (dest, len);
3649 set_mem_align (dest_mem, dest_align);
3650 src_mem = get_memory_rtx (src, len);
3651 set_mem_align (src_mem, src_align);
3652 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3653 MIN (dest_align, src_align), endp);
3654 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3655 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3656 return dest_mem;
3659 return NULL_RTX;
/* Targets without a movstr pattern fall back to the generic paths.  */
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif
3668 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3669 we failed, the caller should emit a normal call, otherwise try to
3670 get the result in TARGET, if convenient. If ENDP is 0 return the
3671 destination pointer, if ENDP is 1 return the end pointer ala
3672 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3673 stpcpy. */
3675 static rtx
3676 expand_movstr (tree dest, tree src, rtx target, int endp)
3678 rtx end;
3679 rtx dest_mem;
3680 rtx src_mem;
3681 rtx insn;
3682 const struct insn_data_d * data;
3684 if (!HAVE_movstr)
3685 return NULL_RTX;
3687 dest_mem = get_memory_rtx (dest, NULL);
3688 src_mem = get_memory_rtx (src, NULL);
3689 data = insn_data + CODE_FOR_movstr;
3690 if (!endp)
3692 target = force_reg (Pmode, XEXP (dest_mem, 0));
3693 dest_mem = replace_equiv_address (dest_mem, target);
3694 end = gen_reg_rtx (Pmode);
3696 else
3698 if (target == 0
3699 || target == const0_rtx
3700 || ! (*data->operand[0].predicate) (target, Pmode))
3702 end = gen_reg_rtx (Pmode);
3703 if (target != const0_rtx)
3704 target = end;
3706 else
3707 end = target;
3710 if (data->operand[0].mode != VOIDmode)
3711 end = gen_lowpart (data->operand[0].mode, end);
3713 insn = data->genfun (end, dest_mem, src_mem);
3715 gcc_assert (insn);
3717 emit_insn (insn);
3719 /* movstr is supposed to set end to the address of the NUL
3720 terminator. If the caller requested a mempcpy-like return value,
3721 adjust it. */
3722 if (endp == 1 && target != const0_rtx)
3724 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3725 emit_move_insn (target, force_operand (tem, NULL_RTX));
3728 return target;
3731 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3732 NULL_RTX if we failed the caller should emit a normal call, otherwise
3733 try to get the result in TARGET, if convenient (and in mode MODE if that's
3734 convenient). */
3736 static rtx
3737 expand_builtin_strcpy (tree exp, rtx target)
3739 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3741 tree dest = CALL_EXPR_ARG (exp, 0);
3742 tree src = CALL_EXPR_ARG (exp, 1);
3743 return expand_builtin_strcpy_args (dest, src, target);
3745 return NULL_RTX;
3748 /* Helper function to do the actual work for expand_builtin_strcpy. The
3749 arguments to the builtin_strcpy call DEST and SRC are broken out
3750 so that this can also be called without constructing an actual CALL_EXPR.
3751 The other arguments and return value are the same as for
3752 expand_builtin_strcpy. */
3754 static rtx
3755 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3757 return expand_movstr (dest, src, target, /*endp=*/0);
3760 /* Expand a call EXP to the stpcpy builtin.
3761 Return NULL_RTX if we failed the caller should emit a normal call,
3762 otherwise try to get the result in TARGET, if convenient (and in
3763 mode MODE if that's convenient). */
3765 static rtx
3766 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3768 tree dst, src;
3769 location_t loc = EXPR_LOCATION (exp);
3771 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3772 return NULL_RTX;
3774 dst = CALL_EXPR_ARG (exp, 0);
3775 src = CALL_EXPR_ARG (exp, 1);
3777 /* If return value is ignored, transform stpcpy into strcpy. */
3778 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3780 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3781 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3782 return expand_expr (result, target, mode, EXPAND_NORMAL);
3784 else
3786 tree len, lenp1;
3787 rtx ret;
3789 /* Ensure we get an actual string whose length can be evaluated at
3790 compile-time, not an expression containing a string. This is
3791 because the latter will potentially produce pessimized code
3792 when used to produce the return value. */
3793 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3794 return expand_movstr (dst, src, target, /*endp=*/2);
3796 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3797 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3798 target, mode, /*endp=*/2);
3800 if (ret)
3801 return ret;
3803 if (TREE_CODE (len) == INTEGER_CST)
3805 rtx len_rtx = expand_normal (len);
3807 if (CONST_INT_P (len_rtx))
3809 ret = expand_builtin_strcpy_args (dst, src, target);
3811 if (ret)
3813 if (! target)
3815 if (mode != VOIDmode)
3816 target = gen_reg_rtx (mode);
3817 else
3818 target = gen_reg_rtx (GET_MODE (ret));
3820 if (GET_MODE (target) != GET_MODE (ret))
3821 ret = gen_lowpart (GET_MODE (target), ret);
3823 ret = plus_constant (ret, INTVAL (len_rtx));
3824 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3825 gcc_assert (ret);
3827 return target;
3832 return expand_movstr (dst, src, target, /*endp=*/2);
3836 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3837 bytes from constant string DATA + OFFSET and return it as target
3838 constant. */
3841 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3842 enum machine_mode mode)
3844 const char *str = (const char *) data;
3846 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3847 return const0_rtx;
3849 return c_readstr (str + offset, mode);
3852 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3853 NULL_RTX if we failed the caller should emit a normal call. */
3855 static rtx
3856 expand_builtin_strncpy (tree exp, rtx target)
3858 location_t loc = EXPR_LOCATION (exp);
3860 if (validate_arglist (exp,
3861 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3863 tree dest = CALL_EXPR_ARG (exp, 0);
3864 tree src = CALL_EXPR_ARG (exp, 1);
3865 tree len = CALL_EXPR_ARG (exp, 2);
3866 tree slen = c_strlen (src, 1);
3868 /* We must be passed a constant len and src parameter. */
3869 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3870 return NULL_RTX;
3872 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3874 /* We're required to pad with trailing zeros if the requested
3875 len is greater than strlen(s2)+1. In that case try to
3876 use store_by_pieces, if it fails, punt. */
3877 if (tree_int_cst_lt (slen, len))
3879 unsigned int dest_align
3880 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3881 const char *p = c_getstr (src);
3882 rtx dest_mem;
3884 if (!p || dest_align == 0 || !host_integerp (len, 1)
3885 || !can_store_by_pieces (tree_low_cst (len, 1),
3886 builtin_strncpy_read_str,
3887 CONST_CAST (char *, p),
3888 dest_align, false))
3889 return NULL_RTX;
3891 dest_mem = get_memory_rtx (dest, len);
3892 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3893 builtin_strncpy_read_str,
3894 CONST_CAST (char *, p), dest_align, false, 0);
3895 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3896 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3897 return dest_mem;
3900 return NULL_RTX;
3903 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3904 bytes from constant string DATA + OFFSET and return it as target
3905 constant. */
3908 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3909 enum machine_mode mode)
3911 const char *c = (const char *) data;
3912 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3914 memset (p, *c, GET_MODE_SIZE (mode));
3916 return c_readstr (p, mode);
3919 /* Callback routine for store_by_pieces. Return the RTL of a register
3920 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3921 char value given in the RTL register data. For example, if mode is
3922 4 bytes wide, return the RTL for 0x01010101*data. */
3924 static rtx
3925 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3926 enum machine_mode mode)
3928 rtx target, coeff;
3929 size_t size;
3930 char *p;
3932 size = GET_MODE_SIZE (mode);
3933 if (size == 1)
3934 return (rtx) data;
3936 p = XALLOCAVEC (char, size);
3937 memset (p, 1, size);
3938 coeff = c_readstr (p, mode);
3940 target = convert_to_mode (mode, (rtx) data, 1);
3941 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3942 return force_reg (mode, target);
3945 /* Expand expression EXP, which is a call to the memset builtin. Return
3946 NULL_RTX if we failed the caller should emit a normal call, otherwise
3947 try to get the result in TARGET, if convenient (and in mode MODE if that's
3948 convenient). */
3950 static rtx
3951 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3953 if (!validate_arglist (exp,
3954 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3955 return NULL_RTX;
3956 else
3958 tree dest = CALL_EXPR_ARG (exp, 0);
3959 tree val = CALL_EXPR_ARG (exp, 1);
3960 tree len = CALL_EXPR_ARG (exp, 2);
3961 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3965 /* Helper function to do the actual work for expand_builtin_memset. The
3966 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3967 so that this can also be called without constructing an actual CALL_EXPR.
3968 The other arguments and return value are the same as for
3969 expand_builtin_memset. */
3971 static rtx
3972 expand_builtin_memset_args (tree dest, tree val, tree len,
3973 rtx target, enum machine_mode mode, tree orig_exp)
3975 tree fndecl, fn;
3976 enum built_in_function fcode;
3977 char c;
3978 unsigned int dest_align;
3979 rtx dest_mem, dest_addr, len_rtx;
/* Block-size/alignment hints from profile feedback; -1/0 mean unknown.  */
3980 HOST_WIDE_INT expected_size = -1;
3981 unsigned int expected_align = 0;
3983 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3985 /* If DEST is not a pointer type, don't do this operation in-line. */
3986 if (dest_align == 0)
3987 return NULL_RTX;
3989 if (currently_expanding_gimple_stmt)
3990 stringop_block_profile (currently_expanding_gimple_stmt,
3991 &expected_align, &expected_size);
3993 if (expected_align < dest_align)
3994 expected_align = dest_align;
3996 /* If the LEN parameter is zero, return DEST. */
3997 if (integer_zerop (len))
3999 /* Evaluate and ignore VAL in case it has side-effects. */
4000 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4001 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4004 /* Stabilize the arguments in case we fail. */
4005 dest = builtin_save_expr (dest);
4006 val = builtin_save_expr (val);
4007 len = builtin_save_expr (len);
4009 len_rtx = expand_normal (len);
4010 dest_mem = get_memory_rtx (dest, len);
/* Case 1: the fill value is not a compile-time constant.  */
4012 if (TREE_CODE (val) != INTEGER_CST)
4014 rtx val_rtx;
4016 val_rtx = expand_normal (val);
4017 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
4018 val_rtx, 0);
4020 /* Assume that we can memset by pieces if we can store
4021 * the coefficients by pieces (in the required modes).
4022 * We can't pass builtin_memset_gen_str as that emits RTL. */
4023 c = 1;
4024 if (host_integerp (len, 1)
4025 && can_store_by_pieces (tree_low_cst (len, 1),
4026 builtin_memset_read_str, &c, dest_align,
4027 true))
4029 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
4030 val_rtx);
4031 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4032 builtin_memset_gen_str, val_rtx, dest_align,
4033 true, 0);
4035 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4036 dest_align, expected_align,
4037 expected_size))
4038 goto do_libcall;
4040 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4041 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4042 return dest_mem;
/* Case 2: VAL is constant; reduce it to the host character C.  */
4045 if (target_char_cast (val, &c))
4046 goto do_libcall;
/* Case 2a: nonzero fill byte -- store by pieces or via setmem pattern.  */
4048 if (c)
4050 if (host_integerp (len, 1)
4051 && can_store_by_pieces (tree_low_cst (len, 1),
4052 builtin_memset_read_str, &c, dest_align,
4053 true))
4054 store_by_pieces (dest_mem, tree_low_cst (len, 1),
4055 builtin_memset_read_str, &c, dest_align, true, 0);
4056 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
4057 dest_align, expected_align,
4058 expected_size))
4059 goto do_libcall;
4061 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4062 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4063 return dest_mem;
/* Case 2b: zero fill byte -- clear the block.  */
4066 set_mem_align (dest_mem, dest_align);
4067 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4068 CALL_EXPR_TAILCALL (orig_exp)
4069 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4070 expected_align, expected_size);
4072 if (dest_addr == 0)
4074 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4075 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4078 return dest_addr;
/* Inline expansion failed: emit a real call to memset or bzero,
   matching whichever builtin ORIG_EXP originally invoked.  */
4080 do_libcall:
4081 fndecl = get_callee_fndecl (orig_exp);
4082 fcode = DECL_FUNCTION_CODE (fndecl);
4083 if (fcode == BUILT_IN_MEMSET)
4084 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4085 dest, val, len);
4086 else if (fcode == BUILT_IN_BZERO)
4087 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4088 dest, len);
4089 else
4090 gcc_unreachable ();
4091 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4092 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4093 return expand_call (fn, target, target == const0_rtx);
4096 /* Expand expression EXP, which is a call to the bzero builtin. Return
4097 NULL_RTX if we failed the caller should emit a normal call. */
4099 static rtx
4100 expand_builtin_bzero (tree exp)
4102 tree dest, size;
4103 location_t loc = EXPR_LOCATION (exp);
4105 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4106 return NULL_RTX;
4108 dest = CALL_EXPR_ARG (exp, 0);
4109 size = CALL_EXPR_ARG (exp, 1);
4111 /* New argument list transforming bzero(ptr x, int y) to
4112 memset(ptr x, int 0, size_t y). This is done this way
4113 so that if it isn't expanded inline, we fallback to
4114 calling bzero instead of memset. */
4116 return expand_builtin_memset_args (dest, integer_zero_node,
4117 fold_convert_loc (loc, sizetype, size),
4118 const0_rtx, VOIDmode, exp);
4121 /* Expand expression EXP, which is a call to the memcmp built-in function.
4122 Return NULL_RTX if we failed and the
4123 caller should emit a normal call, otherwise try to get the result in
4124 TARGET, if convenient (and in mode MODE, if that's convenient). */
4126 static rtx
4127 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4128 ATTRIBUTE_UNUSED enum machine_mode mode)
4130 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4132 if (!validate_arglist (exp,
4133 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4134 return NULL_RTX;
/* Everything below is compiled only when the target provides a block
   comparison insn pattern (cmpmemsi or cmpstrnsi).  */
4136 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4138 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4139 rtx result;
4140 rtx insn;
4141 tree arg1 = CALL_EXPR_ARG (exp, 0);
4142 tree arg2 = CALL_EXPR_ARG (exp, 1);
4143 tree len = CALL_EXPR_ARG (exp, 2);
4145 unsigned int arg1_align
4146 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4147 unsigned int arg2_align
4148 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4149 enum machine_mode insn_mode;
/* Pick the mode of the comparison result from whichever pattern is
   available, preferring cmpmemsi over cmpstrnsi.  */
4151 #ifdef HAVE_cmpmemsi
4152 if (HAVE_cmpmemsi)
4153 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4154 else
4155 #endif
4156 #ifdef HAVE_cmpstrnsi
4157 if (HAVE_cmpstrnsi)
4158 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4159 else
4160 #endif
4161 return NULL_RTX;
4163 /* If we don't have POINTER_TYPE, call the function. */
4164 if (arg1_align == 0 || arg2_align == 0)
4165 return NULL_RTX;
4167 /* Make a place to write the result of the instruction. */
4168 result = target;
4169 if (! (result != 0
4170 && REG_P (result) && GET_MODE (result) == insn_mode
4171 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4172 result = gen_reg_rtx (insn_mode);
4174 arg1_rtx = get_memory_rtx (arg1, len);
4175 arg2_rtx = get_memory_rtx (arg2, len);
4176 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4178 /* Set MEM_SIZE as appropriate. */
4179 if (CONST_INT_P (arg3_rtx))
4181 set_mem_size (arg1_rtx, arg3_rtx);
4182 set_mem_size (arg2_rtx, arg3_rtx);
/* Try to generate the comparison insn; a pattern may still decline by
   returning a null insn, in which case fall back to a libcall.  */
4185 #ifdef HAVE_cmpmemsi
4186 if (HAVE_cmpmemsi)
4187 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4188 GEN_INT (MIN (arg1_align, arg2_align)));
4189 else
4190 #endif
4191 #ifdef HAVE_cmpstrnsi
4192 if (HAVE_cmpstrnsi)
4193 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4194 GEN_INT (MIN (arg1_align, arg2_align)));
4195 else
4196 #endif
4197 gcc_unreachable ();
4199 if (insn)
4200 emit_insn (insn);
4201 else
4202 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4203 TYPE_MODE (integer_type_node), 3,
4204 XEXP (arg1_rtx, 0), Pmode,
4205 XEXP (arg2_rtx, 0), Pmode,
4206 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4207 TYPE_UNSIGNED (sizetype)),
4208 TYPE_MODE (sizetype));
4210 /* Return the value in the proper mode for this function. */
4211 mode = TYPE_MODE (TREE_TYPE (exp));
4212 if (GET_MODE (result) == mode)
4213 return result;
4214 else if (target != 0)
4216 convert_move (target, result, 0);
4217 return target;
4219 else
4220 return convert_to_mode (mode, result, 0);
4222 #endif
4224 return NULL_RTX;
4227 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4228 if we failed the caller should emit a normal call, otherwise try to get
4229 the result in TARGET, if convenient. */
4231 static rtx
4232 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4234 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4235 return NULL_RTX;
/* Inline expansion needs a target cmpstr or cmpstrn insn pattern.  */
4237 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4238 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4239 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
4241 rtx arg1_rtx, arg2_rtx;
4242 rtx result, insn = NULL_RTX;
4243 tree fndecl, fn;
4244 tree arg1 = CALL_EXPR_ARG (exp, 0);
4245 tree arg2 = CALL_EXPR_ARG (exp, 1);
4247 unsigned int arg1_align
4248 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4249 unsigned int arg2_align
4250 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4252 /* If we don't have POINTER_TYPE, call the function. */
4253 if (arg1_align == 0 || arg2_align == 0)
4254 return NULL_RTX;
4256 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4257 arg1 = builtin_save_expr (arg1);
4258 arg2 = builtin_save_expr (arg2);
4260 arg1_rtx = get_memory_rtx (arg1, NULL);
4261 arg2_rtx = get_memory_rtx (arg2, NULL);
4263 #ifdef HAVE_cmpstrsi
4264 /* Try to call cmpstrsi. */
4265 if (HAVE_cmpstrsi)
4267 enum machine_mode insn_mode
4268 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4270 /* Make a place to write the result of the instruction. */
4271 result = target;
4272 if (! (result != 0
4273 && REG_P (result) && GET_MODE (result) == insn_mode
4274 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4275 result = gen_reg_rtx (insn_mode);
4277 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4278 GEN_INT (MIN (arg1_align, arg2_align)));
4280 #endif
4281 #ifdef HAVE_cmpstrnsi
4282 /* Try to determine at least one length and call cmpstrnsi. */
4283 if (!insn && HAVE_cmpstrnsi)
4285 tree len;
4286 rtx arg3_rtx;
4288 enum machine_mode insn_mode
4289 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Lengths here include the terminating NUL (strlen + 1).  */
4290 tree len1 = c_strlen (arg1, 1);
4291 tree len2 = c_strlen (arg2, 1);
4293 if (len1)
4294 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4295 if (len2)
4296 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4298 /* If we don't have a constant length for the first, use the length
4299 of the second, if we know it. We don't require a constant for
4300 this case; some cost analysis could be done if both are available
4301 but neither is constant. For now, assume they're equally cheap,
4302 unless one has side effects. If both strings have constant lengths,
4303 use the smaller. */
4305 if (!len1)
4306 len = len2;
4307 else if (!len2)
4308 len = len1;
4309 else if (TREE_SIDE_EFFECTS (len1))
4310 len = len2;
4311 else if (TREE_SIDE_EFFECTS (len2))
4312 len = len1;
4313 else if (TREE_CODE (len1) != INTEGER_CST)
4314 len = len2;
4315 else if (TREE_CODE (len2) != INTEGER_CST)
4316 len = len1;
4317 else if (tree_int_cst_lt (len1, len2))
4318 len = len1;
4319 else
4320 len = len2;
4322 /* If both arguments have side effects, we cannot optimize. */
4323 if (!len || TREE_SIDE_EFFECTS (len))
4324 goto do_libcall;
4326 arg3_rtx = expand_normal (len);
4328 /* Make a place to write the result of the instruction. */
4329 result = target;
4330 if (! (result != 0
4331 && REG_P (result) && GET_MODE (result) == insn_mode
4332 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4333 result = gen_reg_rtx (insn_mode);
4335 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4336 GEN_INT (MIN (arg1_align, arg2_align)));
4338 #endif
/* A pattern succeeded: emit it and convert the result mode.  */
4340 if (insn)
4342 enum machine_mode mode;
4343 emit_insn (insn);
4345 /* Return the value in the proper mode for this function. */
4346 mode = TYPE_MODE (TREE_TYPE (exp));
4347 if (GET_MODE (result) == mode)
4348 return result;
4349 if (target == 0)
4350 return convert_to_mode (mode, result, 0);
4351 convert_move (target, result, 0);
4352 return target;
4355 /* Expand the library call ourselves using a stabilized argument
4356 list to avoid re-evaluating the function's arguments twice. */
4357 #ifdef HAVE_cmpstrnsi
4358 do_libcall:
4359 #endif
4360 fndecl = get_callee_fndecl (exp);
4361 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4362 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4363 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4364 return expand_call (fn, target, target == const0_rtx);
4366 #endif
4367 return NULL_RTX;
4370 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4371 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4372 the result in TARGET, if convenient. */
4374 static rtx
4375 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4376 ATTRIBUTE_UNUSED enum machine_mode mode)
4378 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4380 if (!validate_arglist (exp,
4381 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4382 return NULL_RTX;
4384 /* If c_strlen can determine an expression for one of the string
4385 lengths, and it doesn't have side effects, then emit cmpstrnsi
4386 using length MIN(strlen(string)+1, arg3). */
4387 #ifdef HAVE_cmpstrnsi
4388 if (HAVE_cmpstrnsi)
4390 tree len, len1, len2;
4391 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4392 rtx result, insn;
4393 tree fndecl, fn;
4394 tree arg1 = CALL_EXPR_ARG (exp, 0);
4395 tree arg2 = CALL_EXPR_ARG (exp, 1);
4396 tree arg3 = CALL_EXPR_ARG (exp, 2);
4398 unsigned int arg1_align
4399 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4400 unsigned int arg2_align
4401 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4402 enum machine_mode insn_mode
4403 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
/* Lengths include the terminating NUL (strlen + 1).  */
4405 len1 = c_strlen (arg1, 1);
4406 len2 = c_strlen (arg2, 1);
4408 if (len1)
4409 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4410 if (len2)
4411 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4413 /* If we don't have a constant length for the first, use the length
4414 of the second, if we know it. We don't require a constant for
4415 this case; some cost analysis could be done if both are available
4416 but neither is constant. For now, assume they're equally cheap,
4417 unless one has side effects. If both strings have constant lengths,
4418 use the smaller. */
4420 if (!len1)
4421 len = len2;
4422 else if (!len2)
4423 len = len1;
4424 else if (TREE_SIDE_EFFECTS (len1))
4425 len = len2;
4426 else if (TREE_SIDE_EFFECTS (len2))
4427 len = len1;
4428 else if (TREE_CODE (len1) != INTEGER_CST)
4429 len = len2;
4430 else if (TREE_CODE (len2) != INTEGER_CST)
4431 len = len1;
4432 else if (tree_int_cst_lt (len1, len2))
4433 len = len1;
4434 else
4435 len = len2;
4437 /* If both arguments have side effects, we cannot optimize. */
4438 if (!len || TREE_SIDE_EFFECTS (len))
4439 return NULL_RTX;
4441 /* The actual new length parameter is MIN(len,arg3). */
4442 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4443 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4445 /* If we don't have POINTER_TYPE, call the function. */
4446 if (arg1_align == 0 || arg2_align == 0)
4447 return NULL_RTX;
4449 /* Make a place to write the result of the instruction. */
4450 result = target;
4451 if (! (result != 0
4452 && REG_P (result) && GET_MODE (result) == insn_mode
4453 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4454 result = gen_reg_rtx (insn_mode);
4456 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4457 arg1 = builtin_save_expr (arg1);
4458 arg2 = builtin_save_expr (arg2);
4459 len = builtin_save_expr (len);
4461 arg1_rtx = get_memory_rtx (arg1, len);
4462 arg2_rtx = get_memory_rtx (arg2, len);
4463 arg3_rtx = expand_normal (len);
4464 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4465 GEN_INT (MIN (arg1_align, arg2_align)));
4466 if (insn)
4468 emit_insn (insn);
4470 /* Return the value in the proper mode for this function. */
4471 mode = TYPE_MODE (TREE_TYPE (exp));
4472 if (GET_MODE (result) == mode)
4473 return result;
4474 if (target == 0)
4475 return convert_to_mode (mode, result, 0);
4476 convert_move (target, result, 0);
4477 return target;
4480 /* Expand the library call ourselves using a stabilized argument
4481 list to avoid re-evaluating the function's arguments twice. */
4482 fndecl = get_callee_fndecl (exp);
4483 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4484 arg1, arg2, len);
4485 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4486 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4487 return expand_call (fn, target, target == const0_rtx);
4489 #endif
4490 return NULL_RTX;
4493 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4494 if that's convenient. */
4497 expand_builtin_saveregs (void)
4499 rtx val, seq;
4501 /* Don't do __builtin_saveregs more than once in a function.
4502 Save the result of the first call and reuse it. */
4503 if (saveregs_value != 0)
4504 return saveregs_value;
4506 /* When this function is called, it means that registers must be
4507 saved on entry to this function. So we migrate the call to the
4508 first insn of this function. */
4510 start_sequence ();
4512 /* Do whatever the machine needs done in this case. */
4513 val = targetm.calls.expand_builtin_saveregs ();
4515 seq = get_insns ();
4516 end_sequence ();
4518 saveregs_value = val;
4520 /* Put the insns after the NOTE that starts the function. If this
4521 is inside a start_sequence, make the outer-level insn chain current, so
4522 the code is placed at the start of the function. */
4523 push_topmost_sequence ();
4524 emit_insn_after (seq, entry_of_function ());
4525 pop_topmost_sequence ();
4527 return val;
4530 /* Expand a call to __builtin_next_arg. */
4532 static rtx
4533 expand_builtin_next_arg (void)
4535 /* Checking arguments is already done in fold_builtin_next_arg
4536 that must be called before this function. */
4537 return expand_binop (ptr_mode, add_optab,
4538 crtl->args.internal_arg_pointer,
4539 crtl->args.arg_offset_rtx,
4540 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4543 /* Make it easier for the backends by protecting the valist argument
4544 from multiple evaluations. */
/* Returns a stabilized tree for VALIST; NEEDS_LVALUE requests a form
   the caller can assign through.  */
4546 static tree
4547 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4549 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4551 /* The current way of determining the type of valist is completely
4552 bogus. We should have the information on the va builtin instead. */
4553 if (!vatype)
4554 vatype = targetm.fn_abi_va_list (cfun->decl);
/* Array-type va_list: backends want a pointer to the element type.  */
4556 if (TREE_CODE (vatype) == ARRAY_TYPE)
4558 if (TREE_SIDE_EFFECTS (valist))
4559 valist = save_expr (valist);
4561 /* For this case, the backends will be expecting a pointer to
4562 vatype, but it's possible we've actually been given an array
4563 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4564 So fix it. */
4565 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4567 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4568 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
/* Scalar/record va_list: take the address, stabilize it, then build a
   MEM_REF back to the value so reads and writes share one evaluation.  */
4571 else
4573 tree pt = build_pointer_type (vatype);
4575 if (! needs_lvalue)
4577 if (! TREE_SIDE_EFFECTS (valist))
4578 return valist;
4580 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4581 TREE_SIDE_EFFECTS (valist) = 1;
4584 if (TREE_SIDE_EFFECTS (valist))
4585 valist = save_expr (valist);
4586 valist = fold_build2_loc (loc, MEM_REF,
4587 vatype, valist, build_int_cst (pt, 0));
4590 return valist;
4593 /* The "standard" definition of va_list is void*. */
4595 tree
4596 std_build_builtin_va_list (void)
4598 return ptr_type_node;
4601 /* The "standard" abi va_list is va_list_type_node. */
4603 tree
4604 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4606 return va_list_type_node;
4609 /* The "standard" type of va_list is va_list_type_node. */
4611 tree
4612 std_canonical_va_list_type (tree type)
4614 tree wtype, htype;
4616 if (INDIRECT_REF_P (type))
4617 type = TREE_TYPE (type);
4618 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4619 type = TREE_TYPE (type);
4620 wtype = va_list_type_node;
4621 htype = type;
4622 /* Treat structure va_list types. */
4623 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4624 htype = TREE_TYPE (htype);
4625 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4627 /* If va_list is an array type, the argument may have decayed
4628 to a pointer type, e.g. by being passed to another function.
4629 In that case, unwrap both types so that we can compare the
4630 underlying records. */
4631 if (TREE_CODE (htype) == ARRAY_TYPE
4632 || POINTER_TYPE_P (htype))
4634 wtype = TREE_TYPE (wtype);
4635 htype = TREE_TYPE (htype);
4638 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4639 return va_list_type_node;
4641 return NULL_TREE;
4644 /* The "standard" implementation of va_start: just assign `nextarg' to
4645 the variable. */
4647 void
4648 std_expand_builtin_va_start (tree valist, rtx nextarg)
4650 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4651 convert_move (va_r, nextarg, 0);
4654 /* Expand EXP, a call to __builtin_va_start. */
4656 static rtx
4657 expand_builtin_va_start (tree exp)
4659 rtx nextarg;
4660 tree valist;
4661 location_t loc = EXPR_LOCATION (exp);
4663 if (call_expr_nargs (exp) < 2)
4665 error_at (loc, "too few arguments to function %<va_start%>");
4666 return const0_rtx;
4669 if (fold_builtin_next_arg (exp, true))
4670 return const0_rtx;
4672 nextarg = expand_builtin_next_arg ();
4673 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4675 if (targetm.expand_builtin_va_start)
4676 targetm.expand_builtin_va_start (valist, nextarg);
4677 else
4678 std_expand_builtin_va_start (valist, nextarg);
4680 return const0_rtx;
4683 /* The "standard" implementation of va_arg: read the value from the
4684 current (padded) address and increment by the (padded) size. */
/* Side effects are emitted into PRE_P/POST_P; the returned tree is the
   dereferenced argument value.  */
4686 tree
4687 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4688 gimple_seq *post_p)
4690 tree addr, t, type_size, rounded_size, valist_tmp;
4691 unsigned HOST_WIDE_INT align, boundary;
4692 bool indirect;
4694 #ifdef ARGS_GROW_DOWNWARD
4695 /* All of the alignment and movement below is for args-grow-up machines.
4696 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4697 implement their own specialized gimplify_va_arg_expr routines. */
4698 gcc_unreachable ();
4699 #endif
/* Types passed by reference are fetched as a pointer, dereferenced at
   the end.  */
4701 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4702 if (indirect)
4703 type = build_pointer_type (type);
4705 align = PARM_BOUNDARY / BITS_PER_UNIT;
4706 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4708 /* When we align parameter on stack for caller, if the parameter
4709 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
4710 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. We will match callee
4711 here with caller. */
4712 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4713 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4715 boundary /= BITS_PER_UNIT;
4717 /* Hoist the valist value into a temporary for the moment. */
4718 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4720 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4721 requires greater alignment, we must perform dynamic alignment. */
4722 if (boundary > align
4723 && !integer_zerop (TYPE_SIZE (type)))
/* Round the pointer up: ap = (ap + boundary - 1) & -boundary.  */
4725 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4726 fold_build2 (POINTER_PLUS_EXPR,
4727 TREE_TYPE (valist),
4728 valist_tmp, size_int (boundary - 1)));
4729 gimplify_and_add (t, pre_p);
4731 t = fold_convert (sizetype, valist_tmp);
4732 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4733 fold_convert (TREE_TYPE (valist),
4734 fold_build2 (BIT_AND_EXPR, sizetype, t,
4735 size_int (-boundary))));
4736 gimplify_and_add (t, pre_p);
4738 else
4739 boundary = align;
4741 /* If the actual alignment is less than the alignment of the type,
4742 adjust the type accordingly so that we don't assume strict alignment
4743 when dereferencing the pointer. */
4744 boundary *= BITS_PER_UNIT;
4745 if (boundary < TYPE_ALIGN (type))
4747 type = build_variant_type_copy (type);
4748 TYPE_ALIGN (type) = boundary;
4751 /* Compute the rounded size of the type. */
4752 type_size = size_in_bytes (type);
4753 rounded_size = round_up (type_size, align);
4755 /* Reduce rounded_size so it's sharable with the postqueue. */
4756 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4758 /* Get AP. */
4759 addr = valist_tmp;
4760 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4762 /* Small args are padded downward. */
4763 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4764 rounded_size, size_int (align));
4765 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4766 size_binop (MINUS_EXPR, rounded_size, type_size));
4767 addr = fold_build2 (POINTER_PLUS_EXPR,
4768 TREE_TYPE (addr), addr, t);
4771 /* Compute new value for AP. */
4772 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4773 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4774 gimplify_and_add (t, pre_p);
4776 addr = fold_convert (build_pointer_type (type), addr);
4778 if (indirect)
4779 addr = build_va_arg_indirect_ref (addr);
4781 return build_va_arg_indirect_ref (addr);
4784 /* Build an indirect-ref expression over the given TREE, which represents a
4785 piece of a va_arg() expansion. */
4786 tree
4787 build_va_arg_indirect_ref (tree addr)
4789 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4791 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4792 mf_mark (addr);
4794 return addr;
4797 /* Return a dummy expression of type TYPE in order to keep going after an
4798 error. */
4800 static tree
4801 dummy_object (tree type)
4803 tree t = build_int_cst (build_pointer_type (type), 0);
4804 return build2 (MEM_REF, type, t, t);
4807 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4808 builtin function, but a very special sort of operator. */
/* Returns a gimplify_status; the rewritten expression goes back into
   *EXPR_P, with side effects added to PRE_P/POST_P.  */
4810 enum gimplify_status
4811 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4813 tree promoted_type, have_va_type;
4814 tree valist = TREE_OPERAND (*expr_p, 0);
4815 tree type = TREE_TYPE (*expr_p);
4816 tree t;
4817 location_t loc = EXPR_LOCATION (*expr_p);
4819 /* Verify that valist is of the proper type. */
4820 have_va_type = TREE_TYPE (valist);
4821 if (have_va_type == error_mark_node)
4822 return GS_ERROR;
4823 have_va_type = targetm.canonical_va_list_type (have_va_type);
4825 if (have_va_type == NULL_TREE)
4827 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4828 return GS_ERROR;
4831 /* Generate a diagnostic for requesting data of a type that cannot
4832 be passed through `...' due to type promotion at the call site. */
4833 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4834 != type)
/* gave_help is function-static so the hint prints at most once per
   compilation.  */
4836 static bool gave_help;
4837 bool warned;
4839 /* Unfortunately, this is merely undefined, rather than a constraint
4840 violation, so we cannot make this an error. If this call is never
4841 executed, the program is still strictly conforming. */
4842 warned = warning_at (loc, 0,
4843 "%qT is promoted to %qT when passed through %<...%>",
4844 type, promoted_type);
4845 if (!gave_help && warned)
4847 gave_help = true;
4848 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4849 promoted_type, type);
4852 /* We can, however, treat "undefined" any way we please.
4853 Call abort to encourage the user to fix the program. */
4854 if (warned)
4855 inform (loc, "if this code is reached, the program will abort");
4856 /* Before the abort, allow the evaluation of the va_list
4857 expression to exit or longjmp. */
4858 gimplify_and_add (valist, pre_p);
4859 t = build_call_expr_loc (loc,
4860 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4861 gimplify_and_add (t, pre_p);
4863 /* This is dead code, but go ahead and finish so that the
4864 mode of the result comes out right. */
4865 *expr_p = dummy_object (type);
4866 return GS_ALL_DONE;
4868 else
4870 /* Make it easier for the backends by protecting the valist argument
4871 from multiple evaluations. */
4872 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4874 /* For this case, the backends will be expecting a pointer to
4875 TREE_TYPE (abi), but it's possible we've
4876 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4877 So fix it. */
4878 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4880 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4881 valist = fold_convert_loc (loc, p1,
4882 build_fold_addr_expr_loc (loc, valist));
4885 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4887 else
4888 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4890 if (!targetm.gimplify_va_arg_expr)
4891 /* FIXME: Once most targets are converted we should merely
4892 assert this is non-null. */
4893 return GS_ALL_DONE;
/* Delegate the actual lowering to the target hook.  */
4895 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4896 return GS_OK;
4900 /* Expand EXP, a call to __builtin_va_end. */
4902 static rtx
4903 expand_builtin_va_end (tree exp)
4905 tree valist = CALL_EXPR_ARG (exp, 0);
4907 /* Evaluate for side effects, if needed. I hate macros that don't
4908 do that. */
4909 if (TREE_SIDE_EFFECTS (valist))
4910 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4912 return const0_rtx;
4915 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4916 builtin rather than just as an assignment in stdarg.h because of the
4917 nastiness of array-type va_list types. */
4919 static rtx
4920 expand_builtin_va_copy (tree exp)
4922 tree dst, src, t;
4923 location_t loc = EXPR_LOCATION (exp);
4925 dst = CALL_EXPR_ARG (exp, 0);
4926 src = CALL_EXPR_ARG (exp, 1);
/* DST is written through (needs an lvalue); SRC is only read.  */
4928 dst = stabilize_va_list_loc (loc, dst, 1);
4929 src = stabilize_va_list_loc (loc, src, 0);
4931 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
/* Scalar/record va_list: a plain assignment suffices.  */
4933 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4935 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4936 TREE_SIDE_EFFECTS (t) = 1;
4937 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Array-type va_list: copy the whole object with a block move.  */
4939 else
4941 rtx dstb, srcb, size;
4943 /* Evaluate to pointers. */
4944 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4945 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4946 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4947 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4949 dstb = convert_memory_address (Pmode, dstb);
4950 srcb = convert_memory_address (Pmode, srcb);
4952 /* "Dereference" to BLKmode memories. */
4953 dstb = gen_rtx_MEM (BLKmode, dstb);
4954 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4955 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4956 srcb = gen_rtx_MEM (BLKmode, srcb);
4957 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4958 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4960 /* Copy. */
4961 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4964 return const0_rtx;
4967 /* Expand a call to one of the builtin functions __builtin_frame_address or
4968 __builtin_return_address. */
4970 static rtx
4971 expand_builtin_frame_address (tree fndecl, tree exp)
4973 /* The argument must be a nonnegative integer constant.
4974 It counts the number of frames to scan up the stack.
4975 The value is the return address saved in that frame. */
4976 if (call_expr_nargs (exp) == 0)
4977 /* Warning about missing arg was already issued. */
4978 return const0_rtx;
4979 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4981 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4982 error ("invalid argument to %<__builtin_frame_address%>");
4983 else
4984 error ("invalid argument to %<__builtin_return_address%>");
4985 return const0_rtx;
4987 else
4989 rtx tem
4990 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4991 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4993 /* Some ports cannot access arbitrary stack frames. */
4994 if (tem == NULL)
4996 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4997 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4998 else
4999 warning (0, "unsupported argument to %<__builtin_return_address%>");
5000 return const0_rtx;
5003 /* For __builtin_frame_address, return what we've got. */
5004 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5005 return tem;
5007 if (!REG_P (tem)
5008 && ! CONSTANT_P (tem))
5009 tem = copy_to_mode_reg (Pmode, tem);
5010 return tem;
5014 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5015 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
5016 is the same as for allocate_dynamic_stack_space. */
5018 static rtx
5019 expand_builtin_alloca (tree exp, bool cannot_accumulate)
5021 rtx op0;
5022 rtx result;
5024 /* Emit normal call if marked not-inlineable. */
5025 if (CALL_CANNOT_INLINE_P (exp))
5026 return NULL_RTX;
5028 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5029 return NULL_RTX;
5031 /* Compute the argument. */
5032 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5034 /* Allocate the desired space. */
5035 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
5036 cannot_accumulate);
5037 result = convert_memory_address (ptr_mode, result);
5039 return result;
/* Expand EXP, a call to a bswap builtin.  The mode is taken from the
   type of the call's single argument.  If convenient, the result is
   placed in TARGET; SUBTARGET may be used as the target for computing
   the operand.  Returns NULL_RTX if the argument list is invalid.  */

static rtx
expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  target = expand_unop (mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (mode, target, 0);
}
5066 /* Expand a call to a unary builtin in EXP.
5067 Return NULL_RTX if a normal call should be emitted rather than expanding the
5068 function in-line. If convenient, the result should be placed in TARGET.
5069 SUBTARGET may be used as the target for computing one of EXP's operands. */
5071 static rtx
5072 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5073 rtx subtarget, optab op_optab)
5075 rtx op0;
5077 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5078 return NULL_RTX;
5080 /* Compute the argument. */
5081 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5082 (subtarget
5083 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5084 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5085 VOIDmode, EXPAND_NORMAL);
5086 /* Compute op, into TARGET if possible.
5087 Set TARGET to wherever the result comes back. */
5088 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5089 op_optab, op0, target, 1);
5090 gcc_assert (target);
5092 return convert_to_mode (target_mode, target, 0);
5095 /* Expand a call to __builtin_expect. We just return our argument
5096 as the builtin_expect semantic should've been already executed by
5097 tree branch prediction pass. */
5099 static rtx
5100 expand_builtin_expect (tree exp, rtx target)
5102 tree arg;
5104 if (call_expr_nargs (exp) < 2)
5105 return const0_rtx;
5106 arg = CALL_EXPR_ARG (exp, 0);
5108 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5109 /* When guessing was done, the hints should be already stripped away. */
5110 gcc_assert (!flag_guess_branch_prob
5111 || optimize == 0 || seen_error ());
5112 return target;
/* Emit code that stops execution: the target's trap instruction when
   one exists, otherwise a call to abort via the library.  A barrier
   follows, since control never returns.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Wrap the argument in a save_expr and write it back into the call,
     presumably so the same saved tree is both expanded here and seen by
     the safe_from_p check below — confirm against builtin_save_expr.  */
  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* First argument: the magnitude.  */
  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  /* Second argument: the sign source.  */
  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
5184 /* Create a new constant string literal and return a char* pointer to it.
5185 The STRING_CST value is the LEN characters at STR. */
5186 tree
5187 build_string_literal (int len, const char *str)
5189 tree t, elem, index, type;
5191 t = build_string (len, str);
5192 elem = build_type_variant (char_type_node, 1, 0);
5193 index = build_index_type (size_int (len - 1));
5194 type = build_array_type (elem, index);
5195 TREE_TYPE (t) = type;
5196 TREE_CONSTANT (t) = 1;
5197 TREE_READONLY (t) = 1;
5198 TREE_STATIC (t) = 1;
5200 type = build_pointer_type (elem);
5201 t = build1 (ADDR_EXPR, type,
5202 build4 (ARRAY_REF, elem,
5203 t, integer_zero_node, NULL_TREE, NULL_TREE));
5204 return t;
/* Expand a call to __builtin___clear_cache.  Three configurations:
   a target "clear_cache" insn, a libgcc __clear_cache that does real
   work, or nothing to do at all.  */

static rtx
expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
{
#ifndef HAVE_clear_cache
#ifdef CLEAR_INSN_CACHE
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does something.  Just do the default expansion to a call to
     __clear_cache().  */
  return NULL_RTX;
#else
  /* There is no "clear_cache" insn, and __clear_cache() in libgcc
     does nothing.  There is no need to call it.  Do nothing.  */
  return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
#else
  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;
  enum insn_code icode;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (HAVE_clear_cache)
    {
      icode = CODE_FOR_clear_cache;

      /* First operand: start of the range, forced into a register if
	 the insn's predicate rejects it as-is.  */
      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
      begin_rtx = convert_memory_address (Pmode, begin_rtx);
      if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
	begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);

      /* Second operand: end of the range, likewise.  */
      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
      end_rtx = convert_memory_address (Pmode, end_rtx);
      if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
	end_rtx = copy_to_mode_reg (Pmode, end_rtx);

      emit_insn (gen_clear_cache (begin_rtx, end_rtx));
    }
  return const0_rtx;
#endif /* HAVE_clear_cache */
}
5260 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5262 static rtx
5263 round_trampoline_addr (rtx tramp)
5265 rtx temp, addend, mask;
5267 /* If we don't need too much alignment, we'll have been guaranteed
5268 proper alignment by get_trampoline_type. */
5269 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5270 return tramp;
5272 /* Round address up to desired boundary. */
5273 temp = gen_reg_rtx (Pmode);
5274 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5275 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5277 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5278 temp, 0, OPTAB_LIB_WIDEN);
5279 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5280 temp, 0, OPTAB_LIB_WIDEN);
5282 return tramp;
/* Expand EXP, a call to __builtin_init_trampoline: fill in the
   trampoline storage and let the target emit the initialization
   insns.  Returns const0_rtx, or NULL_RTX on a bad argument list.  */

static rtx
expand_builtin_init_trampoline (tree exp)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* The TRAMP argument should be the address of a field within the
     local function's FRAME decl.  Let's see if we can fill in the
     MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
				     true, 0);

  /* If rounding the address changed it, retarget the MEM and record
     the alignment and size now guaranteed.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  trampolines_created = 1;

  warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
	      "trampoline generated for nested function %qD", t_func);

  return const0_rtx;
}
5337 static rtx
5338 expand_builtin_adjust_trampoline (tree exp)
5340 rtx tramp;
5342 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5343 return NULL_RTX;
5345 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5346 tramp = round_trampoline_addr (tramp);
5347 if (targetm.calls.trampoline_adjust_address)
5348 tramp = targetm.calls.trampoline_adjust_address (tramp);
5350 return tramp;
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, the
   function returns NULL_RTX to indicate that a normal call should be
   emitted rather than expanding the function in-line.  EXP is the
   expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  enum machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* Emission failed; discard any partial insn sequence.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
	return NULL_RTX;

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: reinterpret it as an integer of the
	 same size.  */
      imode = int_mode_for_mode (fmode);
      if (imode == BLKmode)
	return NULL_RTX;
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      /* Multi-word value: extract just the word that holds the sign
	 bit, and rebase BITPOS within that word.  */
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      double_int mask = double_int_setbit (double_int_zero, bitpos);

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_double_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp,
			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5465 /* Expand fork or exec calls. TARGET is the desired target of the
5466 call. EXP is the call. FN is the
5467 identificator of the actual function. IGNORE is nonzero if the
5468 value is to be ignored. */
5470 static rtx
5471 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5473 tree id, decl;
5474 tree call;
5476 /* If we are not profiling, just call the function. */
5477 if (!profile_arc_flag)
5478 return NULL_RTX;
5480 /* Otherwise call the wrapper. This should be equivalent for the rest of
5481 compiler, so the code does not diverge, and the wrapper may run the
5482 code necessary for keeping the profiling sane. */
5484 switch (DECL_FUNCTION_CODE (fn))
5486 case BUILT_IN_FORK:
5487 id = get_identifier ("__gcov_fork");
5488 break;
5490 case BUILT_IN_EXECL:
5491 id = get_identifier ("__gcov_execl");
5492 break;
5494 case BUILT_IN_EXECV:
5495 id = get_identifier ("__gcov_execv");
5496 break;
5498 case BUILT_IN_EXECLP:
5499 id = get_identifier ("__gcov_execlp");
5500 break;
5502 case BUILT_IN_EXECLE:
5503 id = get_identifier ("__gcov_execle");
5504 break;
5506 case BUILT_IN_EXECVP:
5507 id = get_identifier ("__gcov_execvp");
5508 break;
5510 case BUILT_IN_EXECVE:
5511 id = get_identifier ("__gcov_execve");
5512 break;
5514 default:
5515 gcc_unreachable ();
5518 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5519 FUNCTION_DECL, id, TREE_TYPE (fn));
5520 DECL_EXTERNAL (decl) = 1;
5521 TREE_PUBLIC (decl) = 1;
5522 DECL_ARTIFICIAL (decl) = 1;
5523 TREE_NOTHROW (decl) = 1;
5524 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5525 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5526 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5527 return expand_call (call, target, ignore);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  /* Volatile so the sync access is never deleted or reordered away.  */
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */

static rtx
expand_builtin_sync_operation (enum machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target, bool ignore)
{
  rtx val, mem;
  enum machine_mode old_mode;
  location_t loc = EXPR_LOCATION (exp);

  /* The NAND builtins changed meaning in GCC 4.4; warn once per kind
     when -Wsync-nand is in effect.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Static so each diagnostic is emitted at most once per run.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_FETCH_AND_NAND_1:
	case BUILT_IN_FETCH_AND_NAND_2:
	case BUILT_IN_FETCH_AND_NAND_4:
	case BUILT_IN_FETCH_AND_NAND_8:
	case BUILT_IN_FETCH_AND_NAND_16:

	  if (warned_f_a_n)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_NAND_AND_FETCH_1:
	case BUILT_IN_NAND_AND_FETCH_2:
	case BUILT_IN_NAND_AND_FETCH_4:
	case BUILT_IN_NAND_AND_FETCH_8:
	case BUILT_IN_NAND_AND_FETCH_16:

	  if (warned_n_a_f)
	    break;

	  fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  if (ignore)
    return expand_sync_operation (mem, val, code);
  else
    return expand_sync_fetch_operation (mem, val, code, after, target);
}
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);


  old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If OLD_VAL is promoted to a wider mode, convert it back to MODE.  Take
     care of CONST_INTs, where we know the old_mode only from the call
     argument.  */
  old_mode = GET_MODE (old_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  old_val = convert_modes (mode, old_mode, old_val, 1);

  new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
			 mode, EXPAND_NORMAL);
  /* If NEW_VAL is promoted to a wider mode, convert it back to MODE.  Take
     care of CONST_INTs, where we know the old_mode only from the call
     argument.  */
  old_mode = GET_MODE (new_val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
  new_val = convert_modes (mode, old_mode, new_val, 1);

  if (is_bool)
    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
  else
    return expand_val_compare_and_swap (mem, old_val, new_val, target);
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
				  rtx target)
{
  rtx val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  return expand_sync_lock_test_and_set (mem, val, target);
}
/* Expand the __sync_synchronize intrinsic.  Tries, in order: a target
   memory_barrier insn, the synchronize libfunc, and finally a volatile
   empty asm with a "memory" clobber.  */

static void
expand_builtin_synchronize (void)
{
  gimple x;
  VEC (tree, gc) *v_clobbers;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  if (synchronize_libfunc != NULL_RTX)
    {
      emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
      return;
    }

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  v_clobbers = VEC_alloc (tree, gc, 1);
  VEC_quick_push (tree, v_clobbers,
		  tree_cons (NULL, build_string (6, "memory"), NULL));
  x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
  gimple_asm_set_volatile (x, true);
  expand_asm_stmt (x);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = direct_optab_handler (sync_lock_release_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      if (!insn_data[icode].operand[1].predicate (val, mode))
	val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
5780 /* Expand an expression EXP that calls a built-in function,
5781 with result going to TARGET if that's convenient
5782 (and in mode MODE if that's convenient).
5783 SUBTARGET may be used as the target for computing one of EXP's operands.
5784 IGNORE is nonzero if the value is to be ignored. */
5787 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5788 int ignore)
5790 tree fndecl = get_callee_fndecl (exp);
5791 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5792 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5793 int flags;
5795 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5796 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5798 /* When not optimizing, generate calls to library functions for a certain
5799 set of builtins. */
5800 if (!optimize
5801 && !called_as_built_in (fndecl)
5802 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5803 && fcode != BUILT_IN_ALLOCA
5804 && fcode != BUILT_IN_FREE)
5805 return expand_call (exp, target, ignore);
5807 /* The built-in function expanders test for target == const0_rtx
5808 to determine whether the function's result will be ignored. */
5809 if (ignore)
5810 target = const0_rtx;
5812 /* If the result of a pure or const built-in function is ignored, and
5813 none of its arguments are volatile, we can avoid expanding the
5814 built-in call and just evaluate the arguments for side-effects. */
5815 if (target == const0_rtx
5816 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5817 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5819 bool volatilep = false;
5820 tree arg;
5821 call_expr_arg_iterator iter;
5823 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5824 if (TREE_THIS_VOLATILE (arg))
5826 volatilep = true;
5827 break;
5830 if (! volatilep)
5832 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5833 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5834 return const0_rtx;
5838 switch (fcode)
5840 CASE_FLT_FN (BUILT_IN_FABS):
5841 target = expand_builtin_fabs (exp, target, subtarget);
5842 if (target)
5843 return target;
5844 break;
5846 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5847 target = expand_builtin_copysign (exp, target, subtarget);
5848 if (target)
5849 return target;
5850 break;
5852 /* Just do a normal library call if we were unable to fold
5853 the values. */
5854 CASE_FLT_FN (BUILT_IN_CABS):
5855 break;
5857 CASE_FLT_FN (BUILT_IN_EXP):
5858 CASE_FLT_FN (BUILT_IN_EXP10):
5859 CASE_FLT_FN (BUILT_IN_POW10):
5860 CASE_FLT_FN (BUILT_IN_EXP2):
5861 CASE_FLT_FN (BUILT_IN_EXPM1):
5862 CASE_FLT_FN (BUILT_IN_LOGB):
5863 CASE_FLT_FN (BUILT_IN_LOG):
5864 CASE_FLT_FN (BUILT_IN_LOG10):
5865 CASE_FLT_FN (BUILT_IN_LOG2):
5866 CASE_FLT_FN (BUILT_IN_LOG1P):
5867 CASE_FLT_FN (BUILT_IN_TAN):
5868 CASE_FLT_FN (BUILT_IN_ASIN):
5869 CASE_FLT_FN (BUILT_IN_ACOS):
5870 CASE_FLT_FN (BUILT_IN_ATAN):
5871 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5872 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5873 because of possible accuracy problems. */
5874 if (! flag_unsafe_math_optimizations)
5875 break;
5876 CASE_FLT_FN (BUILT_IN_SQRT):
5877 CASE_FLT_FN (BUILT_IN_FLOOR):
5878 CASE_FLT_FN (BUILT_IN_CEIL):
5879 CASE_FLT_FN (BUILT_IN_TRUNC):
5880 CASE_FLT_FN (BUILT_IN_ROUND):
5881 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5882 CASE_FLT_FN (BUILT_IN_RINT):
5883 target = expand_builtin_mathfn (exp, target, subtarget);
5884 if (target)
5885 return target;
5886 break;
5888 CASE_FLT_FN (BUILT_IN_FMA):
5889 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5890 if (target)
5891 return target;
5892 break;
5894 CASE_FLT_FN (BUILT_IN_ILOGB):
5895 if (! flag_unsafe_math_optimizations)
5896 break;
5897 CASE_FLT_FN (BUILT_IN_ISINF):
5898 CASE_FLT_FN (BUILT_IN_FINITE):
5899 case BUILT_IN_ISFINITE:
5900 case BUILT_IN_ISNORMAL:
5901 target = expand_builtin_interclass_mathfn (exp, target);
5902 if (target)
5903 return target;
5904 break;
5906 CASE_FLT_FN (BUILT_IN_LCEIL):
5907 CASE_FLT_FN (BUILT_IN_LLCEIL):
5908 CASE_FLT_FN (BUILT_IN_LFLOOR):
5909 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5910 target = expand_builtin_int_roundingfn (exp, target);
5911 if (target)
5912 return target;
5913 break;
5915 CASE_FLT_FN (BUILT_IN_LRINT):
5916 CASE_FLT_FN (BUILT_IN_LLRINT):
5917 CASE_FLT_FN (BUILT_IN_LROUND):
5918 CASE_FLT_FN (BUILT_IN_LLROUND):
5919 target = expand_builtin_int_roundingfn_2 (exp, target);
5920 if (target)
5921 return target;
5922 break;
5924 CASE_FLT_FN (BUILT_IN_POW):
5925 target = expand_builtin_pow (exp, target, subtarget);
5926 if (target)
5927 return target;
5928 break;
5930 CASE_FLT_FN (BUILT_IN_POWI):
5931 target = expand_builtin_powi (exp, target);
5932 if (target)
5933 return target;
5934 break;
5936 CASE_FLT_FN (BUILT_IN_ATAN2):
5937 CASE_FLT_FN (BUILT_IN_LDEXP):
5938 CASE_FLT_FN (BUILT_IN_SCALB):
5939 CASE_FLT_FN (BUILT_IN_SCALBN):
5940 CASE_FLT_FN (BUILT_IN_SCALBLN):
5941 if (! flag_unsafe_math_optimizations)
5942 break;
5944 CASE_FLT_FN (BUILT_IN_FMOD):
5945 CASE_FLT_FN (BUILT_IN_REMAINDER):
5946 CASE_FLT_FN (BUILT_IN_DREM):
5947 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5948 if (target)
5949 return target;
5950 break;
5952 CASE_FLT_FN (BUILT_IN_CEXPI):
5953 target = expand_builtin_cexpi (exp, target);
5954 gcc_assert (target);
5955 return target;
5957 CASE_FLT_FN (BUILT_IN_SIN):
5958 CASE_FLT_FN (BUILT_IN_COS):
5959 if (! flag_unsafe_math_optimizations)
5960 break;
5961 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5962 if (target)
5963 return target;
5964 break;
5966 CASE_FLT_FN (BUILT_IN_SINCOS):
5967 if (! flag_unsafe_math_optimizations)
5968 break;
5969 target = expand_builtin_sincos (exp);
5970 if (target)
5971 return target;
5972 break;
5974 case BUILT_IN_APPLY_ARGS:
5975 return expand_builtin_apply_args ();
5977 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5978 FUNCTION with a copy of the parameters described by
5979 ARGUMENTS, and ARGSIZE. It returns a block of memory
5980 allocated on the stack into which is stored all the registers
5981 that might possibly be used for returning the result of a
5982 function. ARGUMENTS is the value returned by
5983 __builtin_apply_args. ARGSIZE is the number of bytes of
5984 arguments that must be copied. ??? How should this value be
5985 computed? We'll also need a safe worst case value for varargs
5986 functions. */
5987 case BUILT_IN_APPLY:
5988 if (!validate_arglist (exp, POINTER_TYPE,
5989 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5990 && !validate_arglist (exp, REFERENCE_TYPE,
5991 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5992 return const0_rtx;
5993 else
5995 rtx ops[3];
5997 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5998 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5999 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6001 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6004 /* __builtin_return (RESULT) causes the function to return the
6005 value described by RESULT. RESULT is address of the block of
6006 memory returned by __builtin_apply. */
6007 case BUILT_IN_RETURN:
6008 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6009 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6010 return const0_rtx;
6012 case BUILT_IN_SAVEREGS:
6013 return expand_builtin_saveregs ();
6015 case BUILT_IN_VA_ARG_PACK:
6016 /* All valid uses of __builtin_va_arg_pack () are removed during
6017 inlining. */
6018 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6019 return const0_rtx;
6021 case BUILT_IN_VA_ARG_PACK_LEN:
6022 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6023 inlining. */
6024 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6025 return const0_rtx;
6027 /* Return the address of the first anonymous stack arg. */
6028 case BUILT_IN_NEXT_ARG:
6029 if (fold_builtin_next_arg (exp, false))
6030 return const0_rtx;
6031 return expand_builtin_next_arg ();
6033 case BUILT_IN_CLEAR_CACHE:
6034 target = expand_builtin___clear_cache (exp);
6035 if (target)
6036 return target;
6037 break;
6039 case BUILT_IN_CLASSIFY_TYPE:
6040 return expand_builtin_classify_type (exp);
6042 case BUILT_IN_CONSTANT_P:
6043 return const0_rtx;
6045 case BUILT_IN_FRAME_ADDRESS:
6046 case BUILT_IN_RETURN_ADDRESS:
6047 return expand_builtin_frame_address (fndecl, exp);
6049 /* Returns the address of the area where the structure is returned.
6050 0 otherwise. */
6051 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6052 if (call_expr_nargs (exp) != 0
6053 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6054 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6055 return const0_rtx;
6056 else
6057 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6059 case BUILT_IN_ALLOCA:
6060 /* If the allocation stems from the declaration of a variable-sized
6061 object, it cannot accumulate. */
6062 target = expand_builtin_alloca (exp, ALLOCA_FOR_VAR_P (exp));
6063 if (target)
6064 return target;
6065 break;
6067 case BUILT_IN_STACK_SAVE:
6068 return expand_stack_save ();
6070 case BUILT_IN_STACK_RESTORE:
6071 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6072 return const0_rtx;
6074 case BUILT_IN_BSWAP32:
6075 case BUILT_IN_BSWAP64:
6076 target = expand_builtin_bswap (exp, target, subtarget);
6078 if (target)
6079 return target;
6080 break;
6082 CASE_INT_FN (BUILT_IN_FFS):
6083 case BUILT_IN_FFSIMAX:
6084 target = expand_builtin_unop (target_mode, exp, target,
6085 subtarget, ffs_optab);
6086 if (target)
6087 return target;
6088 break;
6090 CASE_INT_FN (BUILT_IN_CLZ):
6091 case BUILT_IN_CLZIMAX:
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, clz_optab);
6094 if (target)
6095 return target;
6096 break;
6098 CASE_INT_FN (BUILT_IN_CTZ):
6099 case BUILT_IN_CTZIMAX:
6100 target = expand_builtin_unop (target_mode, exp, target,
6101 subtarget, ctz_optab);
6102 if (target)
6103 return target;
6104 break;
6106 CASE_INT_FN (BUILT_IN_POPCOUNT):
6107 case BUILT_IN_POPCOUNTIMAX:
6108 target = expand_builtin_unop (target_mode, exp, target,
6109 subtarget, popcount_optab);
6110 if (target)
6111 return target;
6112 break;
6114 CASE_INT_FN (BUILT_IN_PARITY):
6115 case BUILT_IN_PARITYIMAX:
6116 target = expand_builtin_unop (target_mode, exp, target,
6117 subtarget, parity_optab);
6118 if (target)
6119 return target;
6120 break;
6122 case BUILT_IN_STRLEN:
6123 target = expand_builtin_strlen (exp, target, target_mode);
6124 if (target)
6125 return target;
6126 break;
6128 case BUILT_IN_STRCPY:
6129 target = expand_builtin_strcpy (exp, target);
6130 if (target)
6131 return target;
6132 break;
6134 case BUILT_IN_STRNCPY:
6135 target = expand_builtin_strncpy (exp, target);
6136 if (target)
6137 return target;
6138 break;
6140 case BUILT_IN_STPCPY:
6141 target = expand_builtin_stpcpy (exp, target, mode);
6142 if (target)
6143 return target;
6144 break;
6146 case BUILT_IN_MEMCPY:
6147 target = expand_builtin_memcpy (exp, target);
6148 if (target)
6149 return target;
6150 break;
6152 case BUILT_IN_MEMPCPY:
6153 target = expand_builtin_mempcpy (exp, target, mode);
6154 if (target)
6155 return target;
6156 break;
6158 case BUILT_IN_MEMSET:
6159 target = expand_builtin_memset (exp, target, mode);
6160 if (target)
6161 return target;
6162 break;
6164 case BUILT_IN_BZERO:
6165 target = expand_builtin_bzero (exp);
6166 if (target)
6167 return target;
6168 break;
6170 case BUILT_IN_STRCMP:
6171 target = expand_builtin_strcmp (exp, target);
6172 if (target)
6173 return target;
6174 break;
6176 case BUILT_IN_STRNCMP:
6177 target = expand_builtin_strncmp (exp, target, mode);
6178 if (target)
6179 return target;
6180 break;
6182 case BUILT_IN_BCMP:
6183 case BUILT_IN_MEMCMP:
6184 target = expand_builtin_memcmp (exp, target, mode);
6185 if (target)
6186 return target;
6187 break;
6189 case BUILT_IN_SETJMP:
6190 /* This should have been lowered to the builtins below. */
6191 gcc_unreachable ();
6193 case BUILT_IN_SETJMP_SETUP:
6194 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6195 and the receiver label. */
6196 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6198 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6199 VOIDmode, EXPAND_NORMAL);
6200 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6201 rtx label_r = label_rtx (label);
6203 /* This is copied from the handling of non-local gotos. */
6204 expand_builtin_setjmp_setup (buf_addr, label_r);
6205 nonlocal_goto_handler_labels
6206 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6207 nonlocal_goto_handler_labels);
6208 /* ??? Do not let expand_label treat us as such since we would
6209 not want to be both on the list of non-local labels and on
6210 the list of forced labels. */
6211 FORCED_LABEL (label) = 0;
6212 return const0_rtx;
6214 break;
6216 case BUILT_IN_SETJMP_DISPATCHER:
6217 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6218 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6220 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6221 rtx label_r = label_rtx (label);
6223 /* Remove the dispatcher label from the list of non-local labels
6224 since the receiver labels have been added to it above. */
6225 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6226 return const0_rtx;
6228 break;
6230 case BUILT_IN_SETJMP_RECEIVER:
6231 /* __builtin_setjmp_receiver is passed the receiver label. */
6232 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6234 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6235 rtx label_r = label_rtx (label);
6237 expand_builtin_setjmp_receiver (label_r);
6238 return const0_rtx;
6240 break;
6242 /* __builtin_longjmp is passed a pointer to an array of five words.
6243 It's similar to the C library longjmp function but works with
6244 __builtin_setjmp above. */
6245 case BUILT_IN_LONGJMP:
6246 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6248 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6249 VOIDmode, EXPAND_NORMAL);
6250 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6252 if (value != const1_rtx)
6254 error ("%<__builtin_longjmp%> second argument must be 1");
6255 return const0_rtx;
6258 expand_builtin_longjmp (buf_addr, value);
6259 return const0_rtx;
6261 break;
6263 case BUILT_IN_NONLOCAL_GOTO:
6264 target = expand_builtin_nonlocal_goto (exp);
6265 if (target)
6266 return target;
6267 break;
6269 /* This updates the setjmp buffer that is its argument with the value
6270 of the current stack pointer. */
6271 case BUILT_IN_UPDATE_SETJMP_BUF:
6272 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6274 rtx buf_addr
6275 = expand_normal (CALL_EXPR_ARG (exp, 0));
6277 expand_builtin_update_setjmp_buf (buf_addr);
6278 return const0_rtx;
6280 break;
6282 case BUILT_IN_TRAP:
6283 expand_builtin_trap ();
6284 return const0_rtx;
6286 case BUILT_IN_UNREACHABLE:
6287 expand_builtin_unreachable ();
6288 return const0_rtx;
6290 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6291 case BUILT_IN_SIGNBITD32:
6292 case BUILT_IN_SIGNBITD64:
6293 case BUILT_IN_SIGNBITD128:
6294 target = expand_builtin_signbit (exp, target);
6295 if (target)
6296 return target;
6297 break;
6299 /* Various hooks for the DWARF 2 __throw routine. */
6300 case BUILT_IN_UNWIND_INIT:
6301 expand_builtin_unwind_init ();
6302 return const0_rtx;
6303 case BUILT_IN_DWARF_CFA:
6304 return virtual_cfa_rtx;
6305 #ifdef DWARF2_UNWIND_INFO
6306 case BUILT_IN_DWARF_SP_COLUMN:
6307 return expand_builtin_dwarf_sp_column ();
6308 case BUILT_IN_INIT_DWARF_REG_SIZES:
6309 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6310 return const0_rtx;
6311 #endif
6312 case BUILT_IN_FROB_RETURN_ADDR:
6313 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6314 case BUILT_IN_EXTRACT_RETURN_ADDR:
6315 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6316 case BUILT_IN_EH_RETURN:
6317 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6318 CALL_EXPR_ARG (exp, 1));
6319 return const0_rtx;
6320 #ifdef EH_RETURN_DATA_REGNO
6321 case BUILT_IN_EH_RETURN_DATA_REGNO:
6322 return expand_builtin_eh_return_data_regno (exp);
6323 #endif
6324 case BUILT_IN_EXTEND_POINTER:
6325 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6326 case BUILT_IN_EH_POINTER:
6327 return expand_builtin_eh_pointer (exp);
6328 case BUILT_IN_EH_FILTER:
6329 return expand_builtin_eh_filter (exp);
6330 case BUILT_IN_EH_COPY_VALUES:
6331 return expand_builtin_eh_copy_values (exp);
6333 case BUILT_IN_VA_START:
6334 return expand_builtin_va_start (exp);
6335 case BUILT_IN_VA_END:
6336 return expand_builtin_va_end (exp);
6337 case BUILT_IN_VA_COPY:
6338 return expand_builtin_va_copy (exp);
6339 case BUILT_IN_EXPECT:
6340 return expand_builtin_expect (exp, target);
6341 case BUILT_IN_PREFETCH:
6342 expand_builtin_prefetch (exp);
6343 return const0_rtx;
6345 case BUILT_IN_INIT_TRAMPOLINE:
6346 return expand_builtin_init_trampoline (exp);
6347 case BUILT_IN_ADJUST_TRAMPOLINE:
6348 return expand_builtin_adjust_trampoline (exp);
6350 case BUILT_IN_FORK:
6351 case BUILT_IN_EXECL:
6352 case BUILT_IN_EXECV:
6353 case BUILT_IN_EXECLP:
6354 case BUILT_IN_EXECLE:
6355 case BUILT_IN_EXECVP:
6356 case BUILT_IN_EXECVE:
6357 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6358 if (target)
6359 return target;
6360 break;
6362 case BUILT_IN_FETCH_AND_ADD_1:
6363 case BUILT_IN_FETCH_AND_ADD_2:
6364 case BUILT_IN_FETCH_AND_ADD_4:
6365 case BUILT_IN_FETCH_AND_ADD_8:
6366 case BUILT_IN_FETCH_AND_ADD_16:
6367 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6368 target = expand_builtin_sync_operation (mode, exp, PLUS,
6369 false, target, ignore);
6370 if (target)
6371 return target;
6372 break;
6374 case BUILT_IN_FETCH_AND_SUB_1:
6375 case BUILT_IN_FETCH_AND_SUB_2:
6376 case BUILT_IN_FETCH_AND_SUB_4:
6377 case BUILT_IN_FETCH_AND_SUB_8:
6378 case BUILT_IN_FETCH_AND_SUB_16:
6379 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6380 target = expand_builtin_sync_operation (mode, exp, MINUS,
6381 false, target, ignore);
6382 if (target)
6383 return target;
6384 break;
6386 case BUILT_IN_FETCH_AND_OR_1:
6387 case BUILT_IN_FETCH_AND_OR_2:
6388 case BUILT_IN_FETCH_AND_OR_4:
6389 case BUILT_IN_FETCH_AND_OR_8:
6390 case BUILT_IN_FETCH_AND_OR_16:
6391 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6392 target = expand_builtin_sync_operation (mode, exp, IOR,
6393 false, target, ignore);
6394 if (target)
6395 return target;
6396 break;
6398 case BUILT_IN_FETCH_AND_AND_1:
6399 case BUILT_IN_FETCH_AND_AND_2:
6400 case BUILT_IN_FETCH_AND_AND_4:
6401 case BUILT_IN_FETCH_AND_AND_8:
6402 case BUILT_IN_FETCH_AND_AND_16:
6403 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6404 target = expand_builtin_sync_operation (mode, exp, AND,
6405 false, target, ignore);
6406 if (target)
6407 return target;
6408 break;
6410 case BUILT_IN_FETCH_AND_XOR_1:
6411 case BUILT_IN_FETCH_AND_XOR_2:
6412 case BUILT_IN_FETCH_AND_XOR_4:
6413 case BUILT_IN_FETCH_AND_XOR_8:
6414 case BUILT_IN_FETCH_AND_XOR_16:
6415 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6416 target = expand_builtin_sync_operation (mode, exp, XOR,
6417 false, target, ignore);
6418 if (target)
6419 return target;
6420 break;
6422 case BUILT_IN_FETCH_AND_NAND_1:
6423 case BUILT_IN_FETCH_AND_NAND_2:
6424 case BUILT_IN_FETCH_AND_NAND_4:
6425 case BUILT_IN_FETCH_AND_NAND_8:
6426 case BUILT_IN_FETCH_AND_NAND_16:
6427 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6428 target = expand_builtin_sync_operation (mode, exp, NOT,
6429 false, target, ignore);
6430 if (target)
6431 return target;
6432 break;
6434 case BUILT_IN_ADD_AND_FETCH_1:
6435 case BUILT_IN_ADD_AND_FETCH_2:
6436 case BUILT_IN_ADD_AND_FETCH_4:
6437 case BUILT_IN_ADD_AND_FETCH_8:
6438 case BUILT_IN_ADD_AND_FETCH_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6440 target = expand_builtin_sync_operation (mode, exp, PLUS,
6441 true, target, ignore);
6442 if (target)
6443 return target;
6444 break;
6446 case BUILT_IN_SUB_AND_FETCH_1:
6447 case BUILT_IN_SUB_AND_FETCH_2:
6448 case BUILT_IN_SUB_AND_FETCH_4:
6449 case BUILT_IN_SUB_AND_FETCH_8:
6450 case BUILT_IN_SUB_AND_FETCH_16:
6451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6452 target = expand_builtin_sync_operation (mode, exp, MINUS,
6453 true, target, ignore);
6454 if (target)
6455 return target;
6456 break;
6458 case BUILT_IN_OR_AND_FETCH_1:
6459 case BUILT_IN_OR_AND_FETCH_2:
6460 case BUILT_IN_OR_AND_FETCH_4:
6461 case BUILT_IN_OR_AND_FETCH_8:
6462 case BUILT_IN_OR_AND_FETCH_16:
6463 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6464 target = expand_builtin_sync_operation (mode, exp, IOR,
6465 true, target, ignore);
6466 if (target)
6467 return target;
6468 break;
6470 case BUILT_IN_AND_AND_FETCH_1:
6471 case BUILT_IN_AND_AND_FETCH_2:
6472 case BUILT_IN_AND_AND_FETCH_4:
6473 case BUILT_IN_AND_AND_FETCH_8:
6474 case BUILT_IN_AND_AND_FETCH_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6476 target = expand_builtin_sync_operation (mode, exp, AND,
6477 true, target, ignore);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_XOR_AND_FETCH_1:
6483 case BUILT_IN_XOR_AND_FETCH_2:
6484 case BUILT_IN_XOR_AND_FETCH_4:
6485 case BUILT_IN_XOR_AND_FETCH_8:
6486 case BUILT_IN_XOR_AND_FETCH_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6488 target = expand_builtin_sync_operation (mode, exp, XOR,
6489 true, target, ignore);
6490 if (target)
6491 return target;
6492 break;
6494 case BUILT_IN_NAND_AND_FETCH_1:
6495 case BUILT_IN_NAND_AND_FETCH_2:
6496 case BUILT_IN_NAND_AND_FETCH_4:
6497 case BUILT_IN_NAND_AND_FETCH_8:
6498 case BUILT_IN_NAND_AND_FETCH_16:
6499 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6500 target = expand_builtin_sync_operation (mode, exp, NOT,
6501 true, target, ignore);
6502 if (target)
6503 return target;
6504 break;
6506 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6507 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6508 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6509 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6510 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6511 if (mode == VOIDmode)
6512 mode = TYPE_MODE (boolean_type_node);
6513 if (!target || !register_operand (target, mode))
6514 target = gen_reg_rtx (mode);
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6517 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6523 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6524 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6525 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6526 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6528 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6529 if (target)
6530 return target;
6531 break;
6533 case BUILT_IN_LOCK_TEST_AND_SET_1:
6534 case BUILT_IN_LOCK_TEST_AND_SET_2:
6535 case BUILT_IN_LOCK_TEST_AND_SET_4:
6536 case BUILT_IN_LOCK_TEST_AND_SET_8:
6537 case BUILT_IN_LOCK_TEST_AND_SET_16:
6538 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6539 target = expand_builtin_lock_test_and_set (mode, exp, target);
6540 if (target)
6541 return target;
6542 break;
6544 case BUILT_IN_LOCK_RELEASE_1:
6545 case BUILT_IN_LOCK_RELEASE_2:
6546 case BUILT_IN_LOCK_RELEASE_4:
6547 case BUILT_IN_LOCK_RELEASE_8:
6548 case BUILT_IN_LOCK_RELEASE_16:
6549 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6550 expand_builtin_lock_release (mode, exp);
6551 return const0_rtx;
6553 case BUILT_IN_SYNCHRONIZE:
6554 expand_builtin_synchronize ();
6555 return const0_rtx;
6557 case BUILT_IN_OBJECT_SIZE:
6558 return expand_builtin_object_size (exp);
6560 case BUILT_IN_MEMCPY_CHK:
6561 case BUILT_IN_MEMPCPY_CHK:
6562 case BUILT_IN_MEMMOVE_CHK:
6563 case BUILT_IN_MEMSET_CHK:
6564 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6565 if (target)
6566 return target;
6567 break;
6569 case BUILT_IN_STRCPY_CHK:
6570 case BUILT_IN_STPCPY_CHK:
6571 case BUILT_IN_STRNCPY_CHK:
6572 case BUILT_IN_STRCAT_CHK:
6573 case BUILT_IN_STRNCAT_CHK:
6574 case BUILT_IN_SNPRINTF_CHK:
6575 case BUILT_IN_VSNPRINTF_CHK:
6576 maybe_emit_chk_warning (exp, fcode);
6577 break;
6579 case BUILT_IN_SPRINTF_CHK:
6580 case BUILT_IN_VSPRINTF_CHK:
6581 maybe_emit_sprintf_chk_warning (exp, fcode);
6582 break;
6584 case BUILT_IN_FREE:
6585 maybe_emit_free_warning (exp);
6586 break;
6588 default: /* just do library call, if unknown builtin */
6589 break;
6592 /* The switch statement above can drop through to cause the function
6593 to be called normally. */
6594 return expand_call (exp, target, ignore);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only a direct call through the address of a FUNCTION_DECL can be a
     built-in; indirect calls are rejected outright.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter type list and the actual call arguments
     in parallel, checking that each argument belongs to the same broad
     type class (float, complex float, pointer, integral) as the
     corresponding parameter.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Reaching `void' with actual arguments left over means the
	     call passed too many arguments.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments for the declared parameter list.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  Returns integer_one_node (definitely a
   constant), integer_zero_node (definitely not something this builtin
   treats as constant), or NULL_TREE when the answer must be deferred to
   later optimization passes.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      /* The address of a string literal, or of its first element, also
	 counts as constant.  */
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  return NULL_TREE;
}
6712 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6713 return it as a truthvalue. */
6715 static tree
6716 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6718 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6720 fn = built_in_decls[BUILT_IN_EXPECT];
6721 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6722 ret_type = TREE_TYPE (TREE_TYPE (fn));
6723 pred_type = TREE_VALUE (arg_types);
6724 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6726 pred = fold_convert_loc (loc, pred_type, pred);
6727 expected = fold_convert_loc (loc, expected_type, expected);
6728 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6730 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6731 build_int_cst (ret_type, 0));
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      /* Rewrite expect (a && b, v) as expect (a, v) && expect (b, v)
	 so each operand carries the prediction.  */
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip component/array references down to the underlying decl;
	 a weak symbol's address is not a usable compile-time constant
	 (it may resolve to zero at link time).  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6805 /* Fold a call to __builtin_classify_type with argument ARG. */
6807 static tree
6808 fold_builtin_classify_type (tree arg)
6810 if (arg == 0)
6811 return build_int_cst (NULL_TREE, no_type_class);
6813 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6816 /* Fold a call to __builtin_strlen with argument ARG. */
6818 static tree
6819 fold_builtin_strlen (location_t loc, tree type, tree arg)
6821 if (!validate_arg (arg, POINTER_TYPE))
6822 return NULL_TREE;
6823 else
6825 tree len = c_strlen (arg, 0);
6827 if (len)
6828 return fold_convert_loc (loc, type, len);
6830 return NULL_TREE;
6834 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6836 static tree
6837 fold_builtin_inf (location_t loc, tree type, int warn)
6839 REAL_VALUE_TYPE real;
6841 /* __builtin_inff is intended to be usable to define INFINITY on all
6842 targets. If an infinity is not available, INFINITY expands "to a
6843 positive constant of type float that overflows at translation
6844 time", footnote "In this case, using INFINITY will violate the
6845 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6846 Thus we pedwarn to ensure this constraint violation is
6847 diagnosed. */
6848 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6849 pedwarn (loc, 0, "target format does not support infinity");
6851 real_inf (&real);
6852 return build_real (type, real);
6855 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6857 static tree
6858 fold_builtin_nan (tree arg, tree type, int quiet)
6860 REAL_VALUE_TYPE real;
6861 const char *str;
6863 if (!validate_arg (arg, POINTER_TYPE))
6864 return NULL_TREE;
6865 str = c_getstr (arg);
6866 if (!str)
6867 return NULL_TREE;
6869 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6870 return NULL_TREE;
6872 return build_real (type, real);
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A value converted from an integer is integral by construction.  */
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* For these the value is that of the second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* These arithmetic operations preserve integrality when both
       operands are integral.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Both arms of the conditional must be integral.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* A conversion from an integer type is integral; a conversion
	   from a narrower real type is integral iff its operand is.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* The integer rounding built-ins always produce integral
	   values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands, so the result is
	   integral when both operands are.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
	    && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Returns the
   simplified tree or NULL_TREE when no simplification applies.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If ARG was widened from a narrower float type and the same
	 built-in exists for that narrower type, round in the narrower
	 type and widen the result instead.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Returns the
   simplified tree or NULL_TREE when no simplification applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      /* If ARG was widened from a narrower float type and the same
	 built-in exists for that narrower type, operate on the
	 narrower type directly.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The long-returning variant's result must be converted back
	     to the original (long long) return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7056 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7057 return type. Return NULL_TREE if no simplification can be made. */
7059 static tree
7060 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7062 tree res;
7064 if (!validate_arg (arg, COMPLEX_TYPE)
7065 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7066 return NULL_TREE;
7068 /* Calculate the result when the argument is a constant. */
7069 if (TREE_CODE (arg) == COMPLEX_CST
7070 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7071 type, mpfr_hypot)))
7072 return res;
7074 if (TREE_CODE (arg) == COMPLEX_EXPR)
7076 tree real = TREE_OPERAND (arg, 0);
7077 tree imag = TREE_OPERAND (arg, 1);
7079 /* If either part is zero, cabs is fabs of the other. */
7080 if (real_zerop (real))
7081 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7082 if (real_zerop (imag))
7083 return fold_build1_loc (loc, ABS_EXPR, type, real);
7085 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7086 if (flag_unsafe_math_optimizations
7087 && operand_equal_p (real, imag, OEP_PURE_SAME))
7089 const REAL_VALUE_TYPE sqrt2_trunc
7090 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7091 STRIP_NOPS (real);
7092 return fold_build2_loc (loc, MULT_EXPR, type,
7093 fold_build1_loc (loc, ABS_EXPR, type, real),
7094 build_real (type, sqrt2_trunc));
7098 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7099 if (TREE_CODE (arg) == NEGATE_EXPR
7100 || TREE_CODE (arg) == CONJ_EXPR)
7101 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7103 /* Don't do this when optimizing for size. */
7104 if (flag_unsafe_math_optimizations
7105 && optimize && optimize_function_for_speed_p (cfun))
7107 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7109 if (sqrtfn != NULL_TREE)
7111 tree rpart, ipart, result;
7113 arg = builtin_save_expr (arg);
7115 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7116 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7118 rpart = builtin_save_expr (rpart);
7119 ipart = builtin_save_expr (ipart);
7121 result = fold_build2_loc (loc, PLUS_EXPR, type,
7122 fold_build2_loc (loc, MULT_EXPR, type,
7123 rpart, rpart),
7124 fold_build2_loc (loc, MULT_EXPR, type,
7125 ipart, ipart));
7127 return build_call_expr_loc (loc, sqrtfn, 1, result);
7131 return NULL_TREE;
7134 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7135 complex tree type of the result. If NEG is true, the imaginary
7136 zero is negative. */
7138 static tree
7139 build_complex_cproj (tree type, bool neg)
7141 REAL_VALUE_TYPE rinf, rzero = dconst0;
7143 real_inf (&rinf);
7144 rzero.sign = neg;
7145 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7146 build_real (TREE_TYPE (type), rzero));
7149 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7150 return type. Return NULL_TREE if no simplification can be made. */
7152 static tree
7153 fold_builtin_cproj (location_t loc, tree arg, tree type)
7155 if (!validate_arg (arg, COMPLEX_TYPE)
7156 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7157 return NULL_TREE;
7159 /* If there are no infinities, return arg. */
7160 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7161 return non_lvalue_loc (loc, arg);
7163 /* Calculate the result when the argument is a constant. */
7164 if (TREE_CODE (arg) == COMPLEX_CST)
7166 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7167 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7169 if (real_isinf (real) || real_isinf (imag))
7170 return build_complex_cproj (type, imag->sign);
7171 else
7172 return arg;
7174 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7176 tree real = TREE_OPERAND (arg, 0);
7177 tree imag = TREE_OPERAND (arg, 1);
7179 STRIP_NOPS (real);
7180 STRIP_NOPS (imag);
7182 /* If the real part is inf and the imag part is known to be
7183 nonnegative, return (inf + 0i). Remember side-effects are
7184 possible in the imag part. */
7185 if (TREE_CODE (real) == REAL_CST
7186 && real_isinf (TREE_REAL_CST_PTR (real))
7187 && tree_expr_nonnegative_p (imag))
7188 return omit_one_operand_loc (loc, type,
7189 build_complex_cproj (type, false),
7190 arg);
7192 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7193 Remember side-effects are possible in the real part. */
7194 if (TREE_CODE (imag) == REAL_CST
7195 && real_isinf (TREE_REAL_CST_PTR (imag)))
7196 return
7197 omit_one_operand_loc (loc, type,
7198 build_complex_cproj (type, TREE_REAL_CST_PTR
7199 (imag)->sign), arg);
7202 return NULL_TREE;
7205 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7206 Return NULL_TREE if no simplification can be made. */
7208 static tree
7209 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7212 enum built_in_function fcode;
7213 tree res;
7215 if (!validate_arg (arg, REAL_TYPE))
7216 return NULL_TREE;
7218 /* Calculate the result when the argument is a constant. */
7219 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7220 return res;
7222 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7223 fcode = builtin_mathfn_code (arg);
7224 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7226 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7227 arg = fold_build2_loc (loc, MULT_EXPR, type,
7228 CALL_EXPR_ARG (arg, 0),
7229 build_real (type, dconsthalf));
7230 return build_call_expr_loc (loc, expfn, 1, arg);
7233 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7234 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7236 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7238 if (powfn)
7240 tree arg0 = CALL_EXPR_ARG (arg, 0);
7241 tree tree_root;
7242 /* The inner root was either sqrt or cbrt. */
7243 /* This was a conditional expression but it triggered a bug
7244 in Sun C 5.5. */
7245 REAL_VALUE_TYPE dconstroot;
7246 if (BUILTIN_SQRT_P (fcode))
7247 dconstroot = dconsthalf;
7248 else
7249 dconstroot = dconst_third ();
7251 /* Adjust for the outer root. */
7252 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7253 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7254 tree_root = build_real (type, dconstroot);
7255 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7259 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7260 if (flag_unsafe_math_optimizations
7261 && (fcode == BUILT_IN_POW
7262 || fcode == BUILT_IN_POWF
7263 || fcode == BUILT_IN_POWL))
7265 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7266 tree arg0 = CALL_EXPR_ARG (arg, 0);
7267 tree arg1 = CALL_EXPR_ARG (arg, 1);
7268 tree narg1;
7269 if (!tree_expr_nonnegative_p (arg0))
7270 arg0 = build1 (ABS_EXPR, type, arg0);
7271 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7272 build_real (type, dconsthalf));
7273 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7276 return NULL_TREE;
7279 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7280 Return NULL_TREE if no simplification can be made. */
7282 static tree
7283 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7285 const enum built_in_function fcode = builtin_mathfn_code (arg);
7286 tree res;
7288 if (!validate_arg (arg, REAL_TYPE))
7289 return NULL_TREE;
7291 /* Calculate the result when the argument is a constant. */
7292 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7293 return res;
7295 if (flag_unsafe_math_optimizations)
7297 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7298 if (BUILTIN_EXPONENT_P (fcode))
7300 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7301 const REAL_VALUE_TYPE third_trunc =
7302 real_value_truncate (TYPE_MODE (type), dconst_third ());
7303 arg = fold_build2_loc (loc, MULT_EXPR, type,
7304 CALL_EXPR_ARG (arg, 0),
7305 build_real (type, third_trunc));
7306 return build_call_expr_loc (loc, expfn, 1, arg);
7309 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7310 if (BUILTIN_SQRT_P (fcode))
7312 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7314 if (powfn)
7316 tree arg0 = CALL_EXPR_ARG (arg, 0);
7317 tree tree_root;
7318 REAL_VALUE_TYPE dconstroot = dconst_third ();
7320 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7321 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7322 tree_root = build_real (type, dconstroot);
7323 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7327 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7328 if (BUILTIN_CBRT_P (fcode))
7330 tree arg0 = CALL_EXPR_ARG (arg, 0);
7331 if (tree_expr_nonnegative_p (arg0))
7333 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7335 if (powfn)
7337 tree tree_root;
7338 REAL_VALUE_TYPE dconstroot;
7340 real_arithmetic (&dconstroot, MULT_EXPR,
7341 dconst_third_ptr (), dconst_third_ptr ());
7342 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7343 tree_root = build_real (type, dconstroot);
7344 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7349 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7350 if (fcode == BUILT_IN_POW
7351 || fcode == BUILT_IN_POWF
7352 || fcode == BUILT_IN_POWL)
7354 tree arg00 = CALL_EXPR_ARG (arg, 0);
7355 tree arg01 = CALL_EXPR_ARG (arg, 1);
7356 if (tree_expr_nonnegative_p (arg00))
7358 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7359 const REAL_VALUE_TYPE dconstroot
7360 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7361 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7362 build_real (type, dconstroot));
7363 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7367 return NULL_TREE;
7370 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7371 TYPE is the type of the return value. Return NULL_TREE if no
7372 simplification can be made. */
7374 static tree
7375 fold_builtin_cos (location_t loc,
7376 tree arg, tree type, tree fndecl)
7378 tree res, narg;
7380 if (!validate_arg (arg, REAL_TYPE))
7381 return NULL_TREE;
7383 /* Calculate the result when the argument is a constant. */
7384 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7385 return res;
7387 /* Optimize cos(-x) into cos (x). */
7388 if ((narg = fold_strip_sign_ops (arg)))
7389 return build_call_expr_loc (loc, fndecl, 1, narg);
7391 return NULL_TREE;
7394 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7395 Return NULL_TREE if no simplification can be made. */
7397 static tree
7398 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7400 if (validate_arg (arg, REAL_TYPE))
7402 tree res, narg;
7404 /* Calculate the result when the argument is a constant. */
7405 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7406 return res;
7408 /* Optimize cosh(-x) into cosh (x). */
7409 if ((narg = fold_strip_sign_ops (arg)))
7410 return build_call_expr_loc (loc, fndecl, 1, narg);
7413 return NULL_TREE;
7416 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7417 argument ARG. TYPE is the type of the return value. Return
7418 NULL_TREE if no simplification can be made. */
7420 static tree
7421 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7422 bool hyper)
7424 if (validate_arg (arg, COMPLEX_TYPE)
7425 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7427 tree tmp;
7429 /* Calculate the result when the argument is a constant. */
7430 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7431 return tmp;
7433 /* Optimize fn(-x) into fn(x). */
7434 if ((tmp = fold_strip_sign_ops (arg)))
7435 return build_call_expr_loc (loc, fndecl, 1, tmp);
7438 return NULL_TREE;
7441 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7442 Return NULL_TREE if no simplification can be made. */
7444 static tree
7445 fold_builtin_tan (tree arg, tree type)
7447 enum built_in_function fcode;
7448 tree res;
7450 if (!validate_arg (arg, REAL_TYPE))
7451 return NULL_TREE;
7453 /* Calculate the result when the argument is a constant. */
7454 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7455 return res;
7457 /* Optimize tan(atan(x)) = x. */
7458 fcode = builtin_mathfn_code (arg);
7459 if (flag_unsafe_math_optimizations
7460 && (fcode == BUILT_IN_ATAN
7461 || fcode == BUILT_IN_ATANF
7462 || fcode == BUILT_IN_ATANL))
7463 return CALL_EXPR_ARG (arg, 0);
7465 return NULL_TREE;
7468 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7469 NULL_TREE if no simplification can be made. */
7471 static tree
7472 fold_builtin_sincos (location_t loc,
7473 tree arg0, tree arg1, tree arg2)
7475 tree type;
7476 tree res, fn, call;
7478 if (!validate_arg (arg0, REAL_TYPE)
7479 || !validate_arg (arg1, POINTER_TYPE)
7480 || !validate_arg (arg2, POINTER_TYPE))
7481 return NULL_TREE;
7483 type = TREE_TYPE (arg0);
7485 /* Calculate the result when the argument is a constant. */
7486 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7487 return res;
7489 /* Canonicalize sincos to cexpi. */
7490 if (!TARGET_C99_FUNCTIONS)
7491 return NULL_TREE;
7492 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7493 if (!fn)
7494 return NULL_TREE;
7496 call = build_call_expr_loc (loc, fn, 1, arg0);
7497 call = builtin_save_expr (call);
7499 return build2 (COMPOUND_EXPR, void_type_node,
7500 build2 (MODIFY_EXPR, void_type_node,
7501 build_fold_indirect_ref_loc (loc, arg1),
7502 build1 (IMAGPART_EXPR, type, call)),
7503 build2 (MODIFY_EXPR, void_type_node,
7504 build_fold_indirect_ref_loc (loc, arg2),
7505 build1 (REALPART_EXPR, type, call)));
7508 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7509 NULL_TREE if no simplification can be made. */
7511 static tree
7512 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7514 tree rtype;
7515 tree realp, imagp, ifn;
7516 tree res;
7518 if (!validate_arg (arg0, COMPLEX_TYPE)
7519 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7520 return NULL_TREE;
7522 /* Calculate the result when the argument is a constant. */
7523 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7524 return res;
7526 rtype = TREE_TYPE (TREE_TYPE (arg0));
7528 /* In case we can figure out the real part of arg0 and it is constant zero
7529 fold to cexpi. */
7530 if (!TARGET_C99_FUNCTIONS)
7531 return NULL_TREE;
7532 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7533 if (!ifn)
7534 return NULL_TREE;
7536 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7537 && real_zerop (realp))
7539 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7540 return build_call_expr_loc (loc, ifn, 1, narg);
7543 /* In case we can easily decompose real and imaginary parts split cexp
7544 to exp (r) * cexpi (i). */
7545 if (flag_unsafe_math_optimizations
7546 && realp)
7548 tree rfn, rcall, icall;
7550 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7551 if (!rfn)
7552 return NULL_TREE;
7554 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7555 if (!imagp)
7556 return NULL_TREE;
7558 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7559 icall = builtin_save_expr (icall);
7560 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7561 rcall = builtin_save_expr (rcall);
7562 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7563 fold_build2_loc (loc, MULT_EXPR, rtype,
7564 rcall,
7565 fold_build1_loc (loc, REALPART_EXPR,
7566 rtype, icall)),
7567 fold_build2_loc (loc, MULT_EXPR, rtype,
7568 rcall,
7569 fold_build1_loc (loc, IMAGPART_EXPR,
7570 rtype, icall)));
7573 return NULL_TREE;
7576 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7577 Return NULL_TREE if no simplification can be made. */
7579 static tree
7580 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7582 if (!validate_arg (arg, REAL_TYPE))
7583 return NULL_TREE;
7585 /* Optimize trunc of constant value. */
7586 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7588 REAL_VALUE_TYPE r, x;
7589 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7591 x = TREE_REAL_CST (arg);
7592 real_trunc (&r, TYPE_MODE (type), &x);
7593 return build_real (type, r);
7596 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7599 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7600 Return NULL_TREE if no simplification can be made. */
7602 static tree
7603 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7605 if (!validate_arg (arg, REAL_TYPE))
7606 return NULL_TREE;
7608 /* Optimize floor of constant value. */
7609 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7611 REAL_VALUE_TYPE x;
7613 x = TREE_REAL_CST (arg);
7614 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7616 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7617 REAL_VALUE_TYPE r;
7619 real_floor (&r, TYPE_MODE (type), &x);
7620 return build_real (type, r);
7624 /* Fold floor (x) where x is nonnegative to trunc (x). */
7625 if (tree_expr_nonnegative_p (arg))
7627 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7628 if (truncfn)
7629 return build_call_expr_loc (loc, truncfn, 1, arg);
7632 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7635 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7636 Return NULL_TREE if no simplification can be made. */
7638 static tree
7639 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7641 if (!validate_arg (arg, REAL_TYPE))
7642 return NULL_TREE;
7644 /* Optimize ceil of constant value. */
7645 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7647 REAL_VALUE_TYPE x;
7649 x = TREE_REAL_CST (arg);
7650 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7652 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7653 REAL_VALUE_TYPE r;
7655 real_ceil (&r, TYPE_MODE (type), &x);
7656 return build_real (type, r);
7660 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7663 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7664 Return NULL_TREE if no simplification can be made. */
7666 static tree
7667 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7669 if (!validate_arg (arg, REAL_TYPE))
7670 return NULL_TREE;
7672 /* Optimize round of constant value. */
7673 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7675 REAL_VALUE_TYPE x;
7677 x = TREE_REAL_CST (arg);
7678 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7680 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7681 REAL_VALUE_TYPE r;
7683 real_round (&r, TYPE_MODE (type), &x);
7684 return build_real (type, r);
7688 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7691 /* Fold function call to builtin lround, lroundf or lroundl (or the
7692 corresponding long long versions) and other rounding functions. ARG
7693 is the argument to the call. Return NULL_TREE if no simplification
7694 can be made. */
7696 static tree
7697 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7699 if (!validate_arg (arg, REAL_TYPE))
7700 return NULL_TREE;
7702 /* Optimize lround of constant value. */
7703 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7705 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7707 if (real_isfinite (&x))
7709 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7710 tree ftype = TREE_TYPE (arg);
7711 double_int val;
7712 REAL_VALUE_TYPE r;
7714 switch (DECL_FUNCTION_CODE (fndecl))
7716 CASE_FLT_FN (BUILT_IN_LFLOOR):
7717 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7718 real_floor (&r, TYPE_MODE (ftype), &x);
7719 break;
7721 CASE_FLT_FN (BUILT_IN_LCEIL):
7722 CASE_FLT_FN (BUILT_IN_LLCEIL):
7723 real_ceil (&r, TYPE_MODE (ftype), &x);
7724 break;
7726 CASE_FLT_FN (BUILT_IN_LROUND):
7727 CASE_FLT_FN (BUILT_IN_LLROUND):
7728 real_round (&r, TYPE_MODE (ftype), &x);
7729 break;
7731 default:
7732 gcc_unreachable ();
7735 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7736 if (double_int_fits_to_tree_p (itype, val))
7737 return double_int_to_tree (itype, val);
7741 switch (DECL_FUNCTION_CODE (fndecl))
7743 CASE_FLT_FN (BUILT_IN_LFLOOR):
7744 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7745 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7746 if (tree_expr_nonnegative_p (arg))
7747 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7748 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7749 break;
7750 default:;
7753 return fold_fixed_mathfn (loc, fndecl, arg);
7756 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7757 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7758 the argument to the call. Return NULL_TREE if no simplification can
7759 be made. */
7761 static tree
7762 fold_builtin_bitop (tree fndecl, tree arg)
7764 if (!validate_arg (arg, INTEGER_TYPE))
7765 return NULL_TREE;
7767 /* Optimize for constant argument. */
7768 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7770 HOST_WIDE_INT hi, width, result;
7771 unsigned HOST_WIDE_INT lo;
7772 tree type;
7774 type = TREE_TYPE (arg);
7775 width = TYPE_PRECISION (type);
7776 lo = TREE_INT_CST_LOW (arg);
7778 /* Clear all the bits that are beyond the type's precision. */
7779 if (width > HOST_BITS_PER_WIDE_INT)
7781 hi = TREE_INT_CST_HIGH (arg);
7782 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7783 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7785 else
7787 hi = 0;
7788 if (width < HOST_BITS_PER_WIDE_INT)
7789 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7792 switch (DECL_FUNCTION_CODE (fndecl))
7794 CASE_INT_FN (BUILT_IN_FFS):
7795 if (lo != 0)
7796 result = ffs_hwi (lo);
7797 else if (hi != 0)
7798 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7799 else
7800 result = 0;
7801 break;
7803 CASE_INT_FN (BUILT_IN_CLZ):
7804 if (hi != 0)
7805 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7806 else if (lo != 0)
7807 result = width - floor_log2 (lo) - 1;
7808 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7809 result = width;
7810 break;
7812 CASE_INT_FN (BUILT_IN_CTZ):
7813 if (lo != 0)
7814 result = ctz_hwi (lo);
7815 else if (hi != 0)
7816 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7817 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7818 result = width;
7819 break;
7821 CASE_INT_FN (BUILT_IN_POPCOUNT):
7822 result = 0;
7823 while (lo)
7824 result++, lo &= lo - 1;
7825 while (hi)
7826 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7827 break;
7829 CASE_INT_FN (BUILT_IN_PARITY):
7830 result = 0;
7831 while (lo)
7832 result++, lo &= lo - 1;
7833 while (hi)
7834 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7835 result &= 1;
7836 break;
7838 default:
7839 gcc_unreachable ();
7842 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7845 return NULL_TREE;
7848 /* Fold function call to builtin_bswap and the long and long long
7849 variants. Return NULL_TREE if no simplification can be made. */
7850 static tree
7851 fold_builtin_bswap (tree fndecl, tree arg)
7853 if (! validate_arg (arg, INTEGER_TYPE))
7854 return NULL_TREE;
7856 /* Optimize constant value. */
7857 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7859 HOST_WIDE_INT hi, width, r_hi = 0;
7860 unsigned HOST_WIDE_INT lo, r_lo = 0;
7861 tree type;
7863 type = TREE_TYPE (arg);
7864 width = TYPE_PRECISION (type);
7865 lo = TREE_INT_CST_LOW (arg);
7866 hi = TREE_INT_CST_HIGH (arg);
7868 switch (DECL_FUNCTION_CODE (fndecl))
7870 case BUILT_IN_BSWAP32:
7871 case BUILT_IN_BSWAP64:
7873 int s;
7875 for (s = 0; s < width; s += 8)
7877 int d = width - s - 8;
7878 unsigned HOST_WIDE_INT byte;
7880 if (s < HOST_BITS_PER_WIDE_INT)
7881 byte = (lo >> s) & 0xff;
7882 else
7883 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7885 if (d < HOST_BITS_PER_WIDE_INT)
7886 r_lo |= byte << d;
7887 else
7888 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7892 break;
7894 default:
7895 gcc_unreachable ();
7898 if (width < HOST_BITS_PER_WIDE_INT)
7899 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7900 else
7901 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7904 return NULL_TREE;
7907 /* A subroutine of fold_builtin to fold the various logarithmic
7908 functions. Return NULL_TREE if no simplification can me made.
7909 FUNC is the corresponding MPFR logarithm function. */
7911 static tree
7912 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7913 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7915 if (validate_arg (arg, REAL_TYPE))
7917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7918 tree res;
7919 const enum built_in_function fcode = builtin_mathfn_code (arg);
7921 /* Calculate the result when the argument is a constant. */
7922 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7923 return res;
7925 /* Special case, optimize logN(expN(x)) = x. */
7926 if (flag_unsafe_math_optimizations
7927 && ((func == mpfr_log
7928 && (fcode == BUILT_IN_EXP
7929 || fcode == BUILT_IN_EXPF
7930 || fcode == BUILT_IN_EXPL))
7931 || (func == mpfr_log2
7932 && (fcode == BUILT_IN_EXP2
7933 || fcode == BUILT_IN_EXP2F
7934 || fcode == BUILT_IN_EXP2L))
7935 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7936 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7938 /* Optimize logN(func()) for various exponential functions. We
7939 want to determine the value "x" and the power "exponent" in
7940 order to transform logN(x**exponent) into exponent*logN(x). */
7941 if (flag_unsafe_math_optimizations)
7943 tree exponent = 0, x = 0;
7945 switch (fcode)
7947 CASE_FLT_FN (BUILT_IN_EXP):
7948 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7949 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7950 dconst_e ()));
7951 exponent = CALL_EXPR_ARG (arg, 0);
7952 break;
7953 CASE_FLT_FN (BUILT_IN_EXP2):
7954 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7955 x = build_real (type, dconst2);
7956 exponent = CALL_EXPR_ARG (arg, 0);
7957 break;
7958 CASE_FLT_FN (BUILT_IN_EXP10):
7959 CASE_FLT_FN (BUILT_IN_POW10):
7960 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7962 REAL_VALUE_TYPE dconst10;
7963 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7964 x = build_real (type, dconst10);
7966 exponent = CALL_EXPR_ARG (arg, 0);
7967 break;
7968 CASE_FLT_FN (BUILT_IN_SQRT):
7969 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7970 x = CALL_EXPR_ARG (arg, 0);
7971 exponent = build_real (type, dconsthalf);
7972 break;
7973 CASE_FLT_FN (BUILT_IN_CBRT):
7974 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7975 x = CALL_EXPR_ARG (arg, 0);
7976 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7977 dconst_third ()));
7978 break;
7979 CASE_FLT_FN (BUILT_IN_POW):
7980 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7981 x = CALL_EXPR_ARG (arg, 0);
7982 exponent = CALL_EXPR_ARG (arg, 1);
7983 break;
7984 default:
7985 break;
7988 /* Now perform the optimization. */
7989 if (x && exponent)
7991 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7992 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7997 return NULL_TREE;
8000 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8001 NULL_TREE if no simplification can be made. */
8003 static tree
8004 fold_builtin_hypot (location_t loc, tree fndecl,
8005 tree arg0, tree arg1, tree type)
8007 tree res, narg0, narg1;
8009 if (!validate_arg (arg0, REAL_TYPE)
8010 || !validate_arg (arg1, REAL_TYPE))
8011 return NULL_TREE;
8013 /* Calculate the result when the argument is a constant. */
8014 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8015 return res;
8017 /* If either argument to hypot has a negate or abs, strip that off.
8018 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8019 narg0 = fold_strip_sign_ops (arg0);
8020 narg1 = fold_strip_sign_ops (arg1);
8021 if (narg0 || narg1)
8023 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8024 narg1 ? narg1 : arg1);
8027 /* If either argument is zero, hypot is fabs of the other. */
8028 if (real_zerop (arg0))
8029 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8030 else if (real_zerop (arg1))
8031 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8033 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8034 if (flag_unsafe_math_optimizations
8035 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8037 const REAL_VALUE_TYPE sqrt2_trunc
8038 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8039 return fold_build2_loc (loc, MULT_EXPR, type,
8040 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8041 build_real (type, sqrt2_trunc));
8044 return NULL_TREE;
8048 /* Fold a builtin function call to pow, powf, or powl. Return
8049 NULL_TREE if no simplification can be made. */
8050 static tree
8051 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8053 tree res;
8055 if (!validate_arg (arg0, REAL_TYPE)
8056 || !validate_arg (arg1, REAL_TYPE))
8057 return NULL_TREE;
8059 /* Calculate the result when the argument is a constant. */
8060 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8061 return res;
8063 /* Optimize pow(1.0,y) = 1.0. */
8064 if (real_onep (arg0))
8065 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8067 if (TREE_CODE (arg1) == REAL_CST
8068 && !TREE_OVERFLOW (arg1))
8070 REAL_VALUE_TYPE cint;
8071 REAL_VALUE_TYPE c;
8072 HOST_WIDE_INT n;
8074 c = TREE_REAL_CST (arg1);
8076 /* Optimize pow(x,0.0) = 1.0. */
8077 if (REAL_VALUES_EQUAL (c, dconst0))
8078 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8079 arg0);
8081 /* Optimize pow(x,1.0) = x. */
8082 if (REAL_VALUES_EQUAL (c, dconst1))
8083 return arg0;
8085 /* Optimize pow(x,-1.0) = 1.0/x. */
8086 if (REAL_VALUES_EQUAL (c, dconstm1))
8087 return fold_build2_loc (loc, RDIV_EXPR, type,
8088 build_real (type, dconst1), arg0);
8090 /* Optimize pow(x,0.5) = sqrt(x). */
8091 if (flag_unsafe_math_optimizations
8092 && REAL_VALUES_EQUAL (c, dconsthalf))
8094 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8096 if (sqrtfn != NULL_TREE)
8097 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8100 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8101 if (flag_unsafe_math_optimizations)
8103 const REAL_VALUE_TYPE dconstroot
8104 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8106 if (REAL_VALUES_EQUAL (c, dconstroot))
8108 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8109 if (cbrtfn != NULL_TREE)
8110 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8114 /* Check for an integer exponent. */
8115 n = real_to_integer (&c);
8116 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8117 if (real_identical (&c, &cint))
8119 /* Attempt to evaluate pow at compile-time, unless this should
8120 raise an exception. */
8121 if (TREE_CODE (arg0) == REAL_CST
8122 && !TREE_OVERFLOW (arg0)
8123 && (n > 0
8124 || (!flag_trapping_math && !flag_errno_math)
8125 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8127 REAL_VALUE_TYPE x;
8128 bool inexact;
8130 x = TREE_REAL_CST (arg0);
8131 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8132 if (flag_unsafe_math_optimizations || !inexact)
8133 return build_real (type, x);
8136 /* Strip sign ops from even integer powers. */
8137 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8139 tree narg0 = fold_strip_sign_ops (arg0);
8140 if (narg0)
8141 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8146 if (flag_unsafe_math_optimizations)
8148 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8150 /* Optimize pow(expN(x),y) = expN(x*y). */
8151 if (BUILTIN_EXPONENT_P (fcode))
8153 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8154 tree arg = CALL_EXPR_ARG (arg0, 0);
8155 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8156 return build_call_expr_loc (loc, expfn, 1, arg);
8159 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8160 if (BUILTIN_SQRT_P (fcode))
8162 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8163 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8164 build_real (type, dconsthalf));
8165 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8168 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8169 if (BUILTIN_CBRT_P (fcode))
8171 tree arg = CALL_EXPR_ARG (arg0, 0);
8172 if (tree_expr_nonnegative_p (arg))
8174 const REAL_VALUE_TYPE dconstroot
8175 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8176 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8177 build_real (type, dconstroot));
8178 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8182 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8183 if (fcode == BUILT_IN_POW
8184 || fcode == BUILT_IN_POWF
8185 || fcode == BUILT_IN_POWL)
8187 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8188 if (tree_expr_nonnegative_p (arg00))
8190 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8191 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8192 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8197 return NULL_TREE;
8200 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8201 Return NULL_TREE if no simplification can be made. */
8202 static tree
8203 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8204 tree arg0, tree arg1, tree type)
8206 if (!validate_arg (arg0, REAL_TYPE)
8207 || !validate_arg (arg1, INTEGER_TYPE))
8208 return NULL_TREE;
8210 /* Optimize pow(1.0,y) = 1.0. */
8211 if (real_onep (arg0))
8212 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8214 if (host_integerp (arg1, 0))
8216 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8218 /* Evaluate powi at compile-time. */
8219 if (TREE_CODE (arg0) == REAL_CST
8220 && !TREE_OVERFLOW (arg0))
8222 REAL_VALUE_TYPE x;
8223 x = TREE_REAL_CST (arg0);
8224 real_powi (&x, TYPE_MODE (type), &x, c);
8225 return build_real (type, x);
8228 /* Optimize pow(x,0) = 1.0. */
8229 if (c == 0)
8230 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8231 arg0);
8233 /* Optimize pow(x,1) = x. */
8234 if (c == 1)
8235 return arg0;
8237 /* Optimize pow(x,-1) = 1.0/x. */
8238 if (c == -1)
8239 return fold_build2_loc (loc, RDIV_EXPR, type,
8240 build_real (type, dconst1), arg0);
8243 return NULL_TREE;
8246 /* A subroutine of fold_builtin to fold the various exponent
8247 functions. Return NULL_TREE if no simplification can be made.
8248 FUNC is the corresponding MPFR exponent function. */
8250 static tree
8251 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8252 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8254 if (validate_arg (arg, REAL_TYPE))
8256 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8257 tree res;
8259 /* Calculate the result when the argument is a constant. */
8260 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8261 return res;
8263 /* Optimize expN(logN(x)) = x. */
8264 if (flag_unsafe_math_optimizations)
8266 const enum built_in_function fcode = builtin_mathfn_code (arg);
8268 if ((func == mpfr_exp
8269 && (fcode == BUILT_IN_LOG
8270 || fcode == BUILT_IN_LOGF
8271 || fcode == BUILT_IN_LOGL))
8272 || (func == mpfr_exp2
8273 && (fcode == BUILT_IN_LOG2
8274 || fcode == BUILT_IN_LOG2F
8275 || fcode == BUILT_IN_LOG2L))
8276 || (func == mpfr_exp10
8277 && (fcode == BUILT_IN_LOG10
8278 || fcode == BUILT_IN_LOG10F
8279 || fcode == BUILT_IN_LOG10L)))
8280 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8284 return NULL_TREE;
8287 /* Return true if VAR is a VAR_DECL or a component thereof. */
8289 static bool
8290 var_decl_component_p (tree var)
8292 tree inner = var;
8293 while (handled_component_p (inner))
8294 inner = TREE_OPERAND (inner, 0);
8295 return SSA_VAR_P (inner);
/* Fold a call to the memset builtin.  DEST, C and LEN are the call's
   arguments; TYPE is the call's return type and IGNORE is true when the
   return value is unused.  The interesting transform turns a memset of a
   whole scalar variable into a single scalar store.  Return NULL_TREE if
   no simplification can be made.  */

static tree
fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
		     tree type, bool ignore)
{
  tree var, ret, etype;
  unsigned HOST_WIDE_INT length, cval;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (c, INTEGER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* The length must be a known non-negative host integer.  */
  if (! host_integerp (len, 1))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST (evaluating C for its
     side effects).  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, c);

  /* A non-constant fill value cannot be replicated at compile time, and
     a DEST with side effects would be evaluated twice below.  */
  if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
    return NULL_TREE;

  /* DEST must be the address of a non-volatile object.  */
  var = dest;
  STRIP_NOPS (var);
  if (TREE_CODE (var) != ADDR_EXPR)
    return NULL_TREE;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return NULL_TREE;

  /* Look through a one-dimensional array to its element type.  */
  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* Only integral and pointer stores are synthesized here.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return NULL_TREE;

  if (! var_decl_component_p (var))
    return NULL_TREE;

  /* The memset must cover exactly one object of ETYPE and the
     destination must be sufficiently aligned for a store of that
     mode.  */
  length = tree_low_cst (len, 1);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
	 < length)
    return NULL_TREE;

  /* The replicated constant must fit in a HOST_WIDE_INT.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return NULL_TREE;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      /* Byte replication below assumes 8-bit bytes and at most 64-bit
	 host integers.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return NULL_TREE;

      /* Replicate the low byte of C into every byte of CVAL.  The final
	 shift is split as (cval << 31) << 1 so it stays well-defined
	 when HOST_WIDE_INT is only 32 bits wide.  */
      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  /* Emit *(etype *) dest = cval.  */
  ret = build_int_cst_type (etype, cval);
  var = build_fold_indirect_ref_loc (loc,
				     fold_convert_loc (loc,
						       build_pointer_type (etype),
						       dest));
  ret = build2 (MODIFY_EXPR, etype, var, ret);
  if (ignore)
    return ret;

  /* The result of memset is DEST; evaluate the store as a side
     effect.  */
  return omit_one_operand_loc (loc, type, dest, ret);
}
8378 /* Fold function call to builtin memset. Return
8379 NULL_TREE if no simplification can be made. */
8381 static tree
8382 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8384 if (! validate_arg (dest, POINTER_TYPE)
8385 || ! validate_arg (size, INTEGER_TYPE))
8386 return NULL_TREE;
8388 if (!ignore)
8389 return NULL_TREE;
8391 /* New argument list transforming bzero(ptr x, int y) to
8392 memset(ptr x, int 0, size_t y). This is done this way
8393 so that if it isn't expanded inline, we fallback to
8394 calling bzero instead of memset. */
8396 return fold_builtin_memset (loc, dest, integer_zero_node,
8397 fold_convert_loc (loc, sizetype, size),
8398 void_type_node, ignore);
/* Fold function call to builtin mem{{,p}cpy,move}.  DEST, SRC and LEN
   are the call's arguments, TYPE its return type and IGNORE true when
   the return value is unused.  Return NULL_TREE if no simplification
   can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static tree
fold_builtin_memory_op (location_t loc, tree dest, tree src,
			tree len, tree type, bool ignore, int endp)
{
  tree destvar, srcvar, expr;

  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (src, POINTER_TYPE)
      || ! validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand_loc (loc, type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  The copy itself is a no-op, so EXPR is just
     LEN, which the tail below uses to compute the return value.  */
  if (operand_equal_p (src, dest, 0))
    expr = len;
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* ENDP == 3 is memmove: we may only turn it into memcpy when the
	 regions provably do not overlap.  Three increasingly expensive
	 tests follow; on success each returns a memcpy call.  */
      if (endp == 3)
	{
	  src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
	  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return NULL_TREE;
	  if (readonly_data_expr (src)
	      || (host_integerp (len, 1)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
	    {
	      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      /* Find the bases and bit offsets of the two accesses; a
		 known constant LEN caps the extent to compare.  */
	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (host_integerp (len, 1))
		maxsize = tree_low_cst (len, 1);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  /* Two plain variables overlap only when they are the
		     same variable with intersecting ranges.  */
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  double_int off;
		  /* Indirect bases: only comparable when they go through
		     the same pointer; fold the MEM_REF offsets into the
		     access offsets (checking for overflow).  */
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return NULL_TREE;
		  off = double_int_add (mem_ref_offset (src_base),
					shwi_to_double_int (src_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  src_offset = off.low;
		  off = double_int_add (mem_ref_offset (dest_base),
					shwi_to_double_int (dest_offset));
		  if (!double_int_fits_in_shwi_p (off))
		    return NULL_TREE;
		  dest_offset = off.low;
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return NULL_TREE;
		}
	      else
		return NULL_TREE;

	      fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
	      if (!fn)
		return NULL_TREE;
	      return build_call_expr_loc (loc, fn, 3, dest, src, len);
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
		  if (!fn)
		    return NULL_TREE;
		  return build_call_expr_loc (loc, fn, 3, dest, src, len);
		}
	    }

	  return NULL_TREE;
	}

      /* From here on: try to replace the copy by a single scalar
	 load/store pair.  LEN must be a known (signed) host integer.  */
      if (!host_integerp (len, 0))
	return NULL_TREE;
      /* FIXME:
	 This logic lose for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      /* For a pointer-to-array whose total size differs from LEN, use
	 the element type instead (e.g. copying one element of an
	 array).  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (srctype
	  && TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (desttype
	  && TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      /* Both sides need complete, constant-sized, non-addressable
	 types for a scalar assignment to be valid.  */
      if (!srctype || !desttype
	  || TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype)
	  || !TYPE_SIZE_UNIT (srctype)
	  || !TYPE_SIZE_UNIT (desttype)
	  || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
	  || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
	return NULL_TREE;

      src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return NULL_TREE;

      /* DEST is used both in the assignment and in the return value;
	 guard against double evaluation.  */
      if (!ignore)
	dest = builtin_save_expr (dest);

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* Accept DEST as a direct variable access only when the copy
	 covers the whole object.  */
      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      /* Likewise for SRC; when both sides are usable the destination
	 type wins, provided SRC is aligned enough for it.  */
      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return NULL_TREE;

      /* When only one side is a direct variable access, build the other
	 side as an indirect access of the matching type (possibly with
	 reduced alignment on non-strict-alignment targets).  */
      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return NULL_TREE;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
    }

  if (ignore)
    return expr;

  /* memcpy and memmove return DEST.  */
  if (endp == 0 || endp == 3)
    return omit_one_operand_loc (loc, type, dest, expr);

  /* EXPR == LEN means the copy folded away entirely (SRC == DEST);
     there is no side effect left to preserve.  */
  if (expr == len)
    expr = NULL_TREE;

  /* stpcpy-style returns DEST + LEN - 1; mempcpy DEST + LEN.  */
  if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));

  len = fold_convert_loc (loc, sizetype, len);
  dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
  dest = fold_convert_loc (loc, type, dest);
  if (expr)
    dest = omit_one_operand_loc (loc, type, dest, expr);
  return dest;
}
8689 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8690 If LEN is not NULL, it represents the length of the string to be
8691 copied. Return NULL_TREE if no simplification can be made. */
8693 tree
8694 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8696 tree fn;
8698 if (!validate_arg (dest, POINTER_TYPE)
8699 || !validate_arg (src, POINTER_TYPE))
8700 return NULL_TREE;
8702 /* If SRC and DEST are the same (and not volatile), return DEST. */
8703 if (operand_equal_p (src, dest, 0))
8704 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8706 if (optimize_function_for_size_p (cfun))
8707 return NULL_TREE;
8709 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8710 if (!fn)
8711 return NULL_TREE;
8713 if (!len)
8715 len = c_strlen (src, 1);
8716 if (! len || TREE_SIDE_EFFECTS (len))
8717 return NULL_TREE;
8720 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8721 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8722 build_call_expr_loc (loc, fn, 3, dest, src, len));
8725 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8726 Return NULL_TREE if no simplification can be made. */
8728 static tree
8729 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8731 tree fn, len, lenp1, call, type;
8733 if (!validate_arg (dest, POINTER_TYPE)
8734 || !validate_arg (src, POINTER_TYPE))
8735 return NULL_TREE;
8737 len = c_strlen (src, 1);
8738 if (!len
8739 || TREE_CODE (len) != INTEGER_CST)
8740 return NULL_TREE;
8742 if (optimize_function_for_size_p (cfun)
8743 /* If length is zero it's small enough. */
8744 && !integer_zerop (len))
8745 return NULL_TREE;
8747 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8748 if (!fn)
8749 return NULL_TREE;
8751 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8752 /* We use dest twice in building our expression. Save it from
8753 multiple expansions. */
8754 dest = builtin_save_expr (dest);
8755 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8757 type = TREE_TYPE (TREE_TYPE (fndecl));
8758 len = fold_convert_loc (loc, sizetype, len);
8759 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8760 dest = fold_convert_loc (loc, type, dest);
8761 dest = omit_one_operand_loc (loc, type, dest, call);
8762 return dest;
8765 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8766 If SLEN is not NULL, it represents the length of the source string.
8767 Return NULL_TREE if no simplification can be made. */
8769 tree
8770 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8771 tree src, tree len, tree slen)
8773 tree fn;
8775 if (!validate_arg (dest, POINTER_TYPE)
8776 || !validate_arg (src, POINTER_TYPE)
8777 || !validate_arg (len, INTEGER_TYPE))
8778 return NULL_TREE;
8780 /* If the LEN parameter is zero, return DEST. */
8781 if (integer_zerop (len))
8782 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8784 /* We can't compare slen with len as constants below if len is not a
8785 constant. */
8786 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8787 return NULL_TREE;
8789 if (!slen)
8790 slen = c_strlen (src, 1);
8792 /* Now, we must be passed a constant src ptr parameter. */
8793 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8794 return NULL_TREE;
8796 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8798 /* We do not support simplification of this case, though we do
8799 support it when expanding trees into RTL. */
8800 /* FIXME: generate a call to __builtin_memset. */
8801 if (tree_int_cst_lt (slen, len))
8802 return NULL_TREE;
8804 /* OK transform into builtin memcpy. */
8805 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8806 if (!fn)
8807 return NULL_TREE;
8808 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8809 build_call_expr_loc (loc, fn, 3, dest, src, len));
8812 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8813 arguments to the call, and TYPE is its return type.
8814 Return NULL_TREE if no simplification can be made. */
8816 static tree
8817 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8819 if (!validate_arg (arg1, POINTER_TYPE)
8820 || !validate_arg (arg2, INTEGER_TYPE)
8821 || !validate_arg (len, INTEGER_TYPE))
8822 return NULL_TREE;
8823 else
8825 const char *p1;
8827 if (TREE_CODE (arg2) != INTEGER_CST
8828 || !host_integerp (len, 1))
8829 return NULL_TREE;
8831 p1 = c_getstr (arg1);
8832 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8834 char c;
8835 const char *r;
8836 tree tem;
8838 if (target_char_cast (arg2, &c))
8839 return NULL_TREE;
8841 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8843 if (r == NULL)
8844 return build_int_cst (TREE_TYPE (arg1), 0);
8846 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8847 size_int (r - p1));
8848 return fold_convert_loc (loc, type, tem);
8850 return NULL_TREE;
8854 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8855 Return NULL_TREE if no simplification can be made. */
8857 static tree
8858 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8860 const char *p1, *p2;
8862 if (!validate_arg (arg1, POINTER_TYPE)
8863 || !validate_arg (arg2, POINTER_TYPE)
8864 || !validate_arg (len, INTEGER_TYPE))
8865 return NULL_TREE;
8867 /* If the LEN parameter is zero, return zero. */
8868 if (integer_zerop (len))
8869 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8870 arg1, arg2);
8872 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8873 if (operand_equal_p (arg1, arg2, 0))
8874 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8876 p1 = c_getstr (arg1);
8877 p2 = c_getstr (arg2);
8879 /* If all arguments are constant, and the value of len is not greater
8880 than the lengths of arg1 and arg2, evaluate at compile-time. */
8881 if (host_integerp (len, 1) && p1 && p2
8882 && compare_tree_int (len, strlen (p1) + 1) <= 0
8883 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8885 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8887 if (r > 0)
8888 return integer_one_node;
8889 else if (r < 0)
8890 return integer_minus_one_node;
8891 else
8892 return integer_zero_node;
8895 /* If len parameter is one, return an expression corresponding to
8896 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8897 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8899 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8900 tree cst_uchar_ptr_node
8901 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8903 tree ind1
8904 = fold_convert_loc (loc, integer_type_node,
8905 build1 (INDIRECT_REF, cst_uchar_node,
8906 fold_convert_loc (loc,
8907 cst_uchar_ptr_node,
8908 arg1)));
8909 tree ind2
8910 = fold_convert_loc (loc, integer_type_node,
8911 build1 (INDIRECT_REF, cst_uchar_node,
8912 fold_convert_loc (loc,
8913 cst_uchar_ptr_node,
8914 arg2)));
8915 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8918 return NULL_TREE;
8921 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8922 Return NULL_TREE if no simplification can be made. */
8924 static tree
8925 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8927 const char *p1, *p2;
8929 if (!validate_arg (arg1, POINTER_TYPE)
8930 || !validate_arg (arg2, POINTER_TYPE))
8931 return NULL_TREE;
8933 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8934 if (operand_equal_p (arg1, arg2, 0))
8935 return integer_zero_node;
8937 p1 = c_getstr (arg1);
8938 p2 = c_getstr (arg2);
8940 if (p1 && p2)
8942 const int i = strcmp (p1, p2);
8943 if (i < 0)
8944 return integer_minus_one_node;
8945 else if (i > 0)
8946 return integer_one_node;
8947 else
8948 return integer_zero_node;
8951 /* If the second arg is "", return *(const unsigned char*)arg1. */
8952 if (p2 && *p2 == '\0')
8954 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8955 tree cst_uchar_ptr_node
8956 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8958 return fold_convert_loc (loc, integer_type_node,
8959 build1 (INDIRECT_REF, cst_uchar_node,
8960 fold_convert_loc (loc,
8961 cst_uchar_ptr_node,
8962 arg1)));
8965 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8966 if (p1 && *p1 == '\0')
8968 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8969 tree cst_uchar_ptr_node
8970 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8972 tree temp
8973 = fold_convert_loc (loc, integer_type_node,
8974 build1 (INDIRECT_REF, cst_uchar_node,
8975 fold_convert_loc (loc,
8976 cst_uchar_ptr_node,
8977 arg2)));
8978 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8981 return NULL_TREE;
8984 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8985 Return NULL_TREE if no simplification can be made. */
8987 static tree
8988 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8990 const char *p1, *p2;
8992 if (!validate_arg (arg1, POINTER_TYPE)
8993 || !validate_arg (arg2, POINTER_TYPE)
8994 || !validate_arg (len, INTEGER_TYPE))
8995 return NULL_TREE;
8997 /* If the LEN parameter is zero, return zero. */
8998 if (integer_zerop (len))
8999 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9000 arg1, arg2);
9002 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9003 if (operand_equal_p (arg1, arg2, 0))
9004 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9006 p1 = c_getstr (arg1);
9007 p2 = c_getstr (arg2);
9009 if (host_integerp (len, 1) && p1 && p2)
9011 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9012 if (i > 0)
9013 return integer_one_node;
9014 else if (i < 0)
9015 return integer_minus_one_node;
9016 else
9017 return integer_zero_node;
9020 /* If the second arg is "", and the length is greater than zero,
9021 return *(const unsigned char*)arg1. */
9022 if (p2 && *p2 == '\0'
9023 && TREE_CODE (len) == INTEGER_CST
9024 && tree_int_cst_sgn (len) == 1)
9026 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9027 tree cst_uchar_ptr_node
9028 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9030 return fold_convert_loc (loc, integer_type_node,
9031 build1 (INDIRECT_REF, cst_uchar_node,
9032 fold_convert_loc (loc,
9033 cst_uchar_ptr_node,
9034 arg1)));
9037 /* If the first arg is "", and the length is greater than zero,
9038 return -*(const unsigned char*)arg2. */
9039 if (p1 && *p1 == '\0'
9040 && TREE_CODE (len) == INTEGER_CST
9041 && tree_int_cst_sgn (len) == 1)
9043 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9044 tree cst_uchar_ptr_node
9045 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9047 tree temp = fold_convert_loc (loc, integer_type_node,
9048 build1 (INDIRECT_REF, cst_uchar_node,
9049 fold_convert_loc (loc,
9050 cst_uchar_ptr_node,
9051 arg2)));
9052 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9055 /* If len parameter is one, return an expression corresponding to
9056 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9057 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9059 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9060 tree cst_uchar_ptr_node
9061 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9063 tree ind1 = fold_convert_loc (loc, integer_type_node,
9064 build1 (INDIRECT_REF, cst_uchar_node,
9065 fold_convert_loc (loc,
9066 cst_uchar_ptr_node,
9067 arg1)));
9068 tree ind2 = fold_convert_loc (loc, integer_type_node,
9069 build1 (INDIRECT_REF, cst_uchar_node,
9070 fold_convert_loc (loc,
9071 cst_uchar_ptr_node,
9072 arg2)));
9073 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9076 return NULL_TREE;
9079 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9080 ARG. Return NULL_TREE if no simplification can be made. */
9082 static tree
9083 fold_builtin_signbit (location_t loc, tree arg, tree type)
9085 if (!validate_arg (arg, REAL_TYPE))
9086 return NULL_TREE;
9088 /* If ARG is a compile-time constant, determine the result. */
9089 if (TREE_CODE (arg) == REAL_CST
9090 && !TREE_OVERFLOW (arg))
9092 REAL_VALUE_TYPE c;
9094 c = TREE_REAL_CST (arg);
9095 return (REAL_VALUE_NEGATIVE (c)
9096 ? build_one_cst (type)
9097 : build_zero_cst (type));
9100 /* If ARG is non-negative, the result is always zero. */
9101 if (tree_expr_nonnegative_p (arg))
9102 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9104 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9106 return fold_build2_loc (loc, LT_EXPR, type, arg,
9107 build_real (TREE_TYPE (arg), dconst0));
9109 return NULL_TREE;
9112 /* Fold function call to builtin copysign, copysignf or copysignl with
9113 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9114 be made. */
9116 static tree
9117 fold_builtin_copysign (location_t loc, tree fndecl,
9118 tree arg1, tree arg2, tree type)
9120 tree tem;
9122 if (!validate_arg (arg1, REAL_TYPE)
9123 || !validate_arg (arg2, REAL_TYPE))
9124 return NULL_TREE;
9126 /* copysign(X,X) is X. */
9127 if (operand_equal_p (arg1, arg2, 0))
9128 return fold_convert_loc (loc, type, arg1);
9130 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9131 if (TREE_CODE (arg1) == REAL_CST
9132 && TREE_CODE (arg2) == REAL_CST
9133 && !TREE_OVERFLOW (arg1)
9134 && !TREE_OVERFLOW (arg2))
9136 REAL_VALUE_TYPE c1, c2;
9138 c1 = TREE_REAL_CST (arg1);
9139 c2 = TREE_REAL_CST (arg2);
9140 /* c1.sign := c2.sign. */
9141 real_copysign (&c1, &c2);
9142 return build_real (type, c1);
9145 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9146 Remember to evaluate Y for side-effects. */
9147 if (tree_expr_nonnegative_p (arg2))
9148 return omit_one_operand_loc (loc, type,
9149 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9150 arg2);
9152 /* Strip sign changing operations for the first argument. */
9153 tem = fold_strip_sign_ops (arg1);
9154 if (tem)
9155 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9157 return NULL_TREE;
9160 /* Fold a call to builtin isascii with argument ARG. */
9162 static tree
9163 fold_builtin_isascii (location_t loc, tree arg)
9165 if (!validate_arg (arg, INTEGER_TYPE))
9166 return NULL_TREE;
9167 else
9169 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9170 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9171 build_int_cst (NULL_TREE,
9172 ~ (unsigned HOST_WIDE_INT) 0x7f));
9173 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9174 arg, integer_zero_node);
9178 /* Fold a call to builtin toascii with argument ARG. */
9180 static tree
9181 fold_builtin_toascii (location_t loc, tree arg)
9183 if (!validate_arg (arg, INTEGER_TYPE))
9184 return NULL_TREE;
9186 /* Transform toascii(c) -> (c & 0x7f). */
9187 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9188 build_int_cst (NULL_TREE, 0x7f));
9191 /* Fold a call to builtin isdigit with argument ARG. */
9193 static tree
9194 fold_builtin_isdigit (location_t loc, tree arg)
9196 if (!validate_arg (arg, INTEGER_TYPE))
9197 return NULL_TREE;
9198 else
9200 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9201 /* According to the C standard, isdigit is unaffected by locale.
9202 However, it definitely is affected by the target character set. */
9203 unsigned HOST_WIDE_INT target_digit0
9204 = lang_hooks.to_target_charset ('0');
9206 if (target_digit0 == 0)
9207 return NULL_TREE;
9209 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9210 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9211 build_int_cst (unsigned_type_node, target_digit0));
9212 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9213 build_int_cst (unsigned_type_node, 9));
9217 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9219 static tree
9220 fold_builtin_fabs (location_t loc, tree arg, tree type)
9222 if (!validate_arg (arg, REAL_TYPE))
9223 return NULL_TREE;
9225 arg = fold_convert_loc (loc, type, arg);
9226 if (TREE_CODE (arg) == REAL_CST)
9227 return fold_abs_const (arg, type);
9228 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9231 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9233 static tree
9234 fold_builtin_abs (location_t loc, tree arg, tree type)
9236 if (!validate_arg (arg, INTEGER_TYPE))
9237 return NULL_TREE;
9239 arg = fold_convert_loc (loc, type, arg);
9240 if (TREE_CODE (arg) == INTEGER_CST)
9241 return fold_abs_const (arg, type);
9242 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9245 /* Fold a fma operation with arguments ARG[012]. */
9247 tree
9248 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9249 tree type, tree arg0, tree arg1, tree arg2)
9251 if (TREE_CODE (arg0) == REAL_CST
9252 && TREE_CODE (arg1) == REAL_CST
9253 && TREE_CODE (arg2) == REAL_CST)
9254 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9256 return NULL_TREE;
9259 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9261 static tree
9262 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9264 if (validate_arg (arg0, REAL_TYPE)
9265 && validate_arg(arg1, REAL_TYPE)
9266 && validate_arg(arg2, REAL_TYPE))
9268 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9269 if (tem)
9270 return tem;
9272 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9273 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9274 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9276 return NULL_TREE;
9279 /* Fold a call to builtin fmin or fmax. */
9281 static tree
9282 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9283 tree type, bool max)
9285 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9287 /* Calculate the result when the argument is a constant. */
9288 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9290 if (res)
9291 return res;
9293 /* If either argument is NaN, return the other one. Avoid the
9294 transformation if we get (and honor) a signalling NaN. Using
9295 omit_one_operand() ensures we create a non-lvalue. */
9296 if (TREE_CODE (arg0) == REAL_CST
9297 && real_isnan (&TREE_REAL_CST (arg0))
9298 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9299 || ! TREE_REAL_CST (arg0).signalling))
9300 return omit_one_operand_loc (loc, type, arg1, arg0);
9301 if (TREE_CODE (arg1) == REAL_CST
9302 && real_isnan (&TREE_REAL_CST (arg1))
9303 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9304 || ! TREE_REAL_CST (arg1).signalling))
9305 return omit_one_operand_loc (loc, type, arg0, arg1);
9307 /* Transform fmin/fmax(x,x) -> x. */
9308 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9309 return omit_one_operand_loc (loc, type, arg0, arg1);
9311 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9312 functions to return the numeric arg if the other one is NaN.
9313 These tree codes don't honor that, so only transform if
9314 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9315 handled, so we don't have to worry about it either. */
9316 if (flag_finite_math_only)
9317 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9318 fold_convert_loc (loc, type, arg0),
9319 fold_convert_loc (loc, type, arg1));
9321 return NULL_TREE;
9324 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9326 static tree
9327 fold_builtin_carg (location_t loc, tree arg, tree type)
9329 if (validate_arg (arg, COMPLEX_TYPE)
9330 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9332 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9334 if (atan2_fn)
9336 tree new_arg = builtin_save_expr (arg);
9337 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9338 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9339 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9343 return NULL_TREE;
9346 /* Fold a call to builtin logb/ilogb. */
9348 static tree
9349 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9351 if (! validate_arg (arg, REAL_TYPE))
9352 return NULL_TREE;
9354 STRIP_NOPS (arg);
9356 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9358 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9360 switch (value->cl)
9362 case rvc_nan:
9363 case rvc_inf:
9364 /* If arg is Inf or NaN and we're logb, return it. */
9365 if (TREE_CODE (rettype) == REAL_TYPE)
9366 return fold_convert_loc (loc, rettype, arg);
9367 /* Fall through... */
9368 case rvc_zero:
9369 /* Zero may set errno and/or raise an exception for logb, also
9370 for ilogb we don't know FP_ILOGB0. */
9371 return NULL_TREE;
9372 case rvc_normal:
9373 /* For normal numbers, proceed iff radix == 2. In GCC,
9374 normalized significands are in the range [0.5, 1.0). We
9375 want the exponent as if they were [1.0, 2.0) so get the
9376 exponent and subtract 1. */
9377 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9378 return fold_convert_loc (loc, rettype,
9379 build_int_cst (NULL_TREE,
9380 REAL_EXP (value)-1));
9381 break;
9385 return NULL_TREE;
9388 /* Fold a call to builtin significand, if radix == 2. */
9390 static tree
9391 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9393 if (! validate_arg (arg, REAL_TYPE))
9394 return NULL_TREE;
9396 STRIP_NOPS (arg);
9398 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9400 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9402 switch (value->cl)
9404 case rvc_zero:
9405 case rvc_nan:
9406 case rvc_inf:
9407 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9408 return fold_convert_loc (loc, rettype, arg);
9409 case rvc_normal:
9410 /* For normal numbers, proceed iff radix == 2. */
9411 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9413 REAL_VALUE_TYPE result = *value;
9414 /* In GCC, normalized significands are in the range [0.5,
9415 1.0). We want them to be [1.0, 2.0) so set the
9416 exponent to 1. */
9417 SET_REAL_EXP (&result, 1);
9418 return build_real (rettype, result);
9420 break;
9424 return NULL_TREE;
9427 /* Fold a call to builtin frexp, we can assume the base is 2. */
9429 static tree
9430 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9432 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9433 return NULL_TREE;
9435 STRIP_NOPS (arg0);
9437 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9438 return NULL_TREE;
9440 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9442 /* Proceed if a valid pointer type was passed in. */
9443 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9445 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9446 tree frac, exp;
9448 switch (value->cl)
9450 case rvc_zero:
9451 /* For +-0, return (*exp = 0, +-0). */
9452 exp = integer_zero_node;
9453 frac = arg0;
9454 break;
9455 case rvc_nan:
9456 case rvc_inf:
9457 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9458 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9459 case rvc_normal:
9461 /* Since the frexp function always expects base 2, and in
9462 GCC normalized significands are already in the range
9463 [0.5, 1.0), we have exactly what frexp wants. */
9464 REAL_VALUE_TYPE frac_rvt = *value;
9465 SET_REAL_EXP (&frac_rvt, 0);
9466 frac = build_real (rettype, frac_rvt);
9467 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9469 break;
9470 default:
9471 gcc_unreachable ();
9474 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9475 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9476 TREE_SIDE_EFFECTS (arg1) = 1;
9477 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9480 return NULL_TREE;
9483 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9484 then we can assume the base is two. If it's false, then we have to
9485 check the mode of the TYPE parameter in certain cases. */
9487 static tree
9488 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9489 tree type, bool ldexp)
9491 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9493 STRIP_NOPS (arg0);
9494 STRIP_NOPS (arg1);
9496 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9497 if (real_zerop (arg0) || integer_zerop (arg1)
9498 || (TREE_CODE (arg0) == REAL_CST
9499 && !real_isfinite (&TREE_REAL_CST (arg0))))
9500 return omit_one_operand_loc (loc, type, arg0, arg1);
9502 /* If both arguments are constant, then try to evaluate it. */
9503 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9504 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9505 && host_integerp (arg1, 0))
9507 /* Bound the maximum adjustment to twice the range of the
9508 mode's valid exponents. Use abs to ensure the range is
9509 positive as a sanity check. */
9510 const long max_exp_adj = 2 *
9511 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9512 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9514 /* Get the user-requested adjustment. */
9515 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9517 /* The requested adjustment must be inside this range. This
9518 is a preliminary cap to avoid things like overflow, we
9519 may still fail to compute the result for other reasons. */
9520 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9522 REAL_VALUE_TYPE initial_result;
9524 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9526 /* Ensure we didn't overflow. */
9527 if (! real_isinf (&initial_result))
9529 const REAL_VALUE_TYPE trunc_result
9530 = real_value_truncate (TYPE_MODE (type), initial_result);
9532 /* Only proceed if the target mode can hold the
9533 resulting value. */
9534 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9535 return build_real (type, trunc_result);
9541 return NULL_TREE;
9544 /* Fold a call to builtin modf. */
9546 static tree
9547 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9549 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9550 return NULL_TREE;
9552 STRIP_NOPS (arg0);
9554 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9555 return NULL_TREE;
9557 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9559 /* Proceed if a valid pointer type was passed in. */
9560 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9562 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9563 REAL_VALUE_TYPE trunc, frac;
9565 switch (value->cl)
9567 case rvc_nan:
9568 case rvc_zero:
9569 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9570 trunc = frac = *value;
9571 break;
9572 case rvc_inf:
9573 /* For +-Inf, return (*arg1 = arg0, +-0). */
9574 frac = dconst0;
9575 frac.sign = value->sign;
9576 trunc = *value;
9577 break;
9578 case rvc_normal:
9579 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9580 real_trunc (&trunc, VOIDmode, value);
9581 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9582 /* If the original number was negative and already
9583 integral, then the fractional part is -0.0. */
9584 if (value->sign && frac.cl == rvc_zero)
9585 frac.sign = value->sign;
9586 break;
9589 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9590 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9591 build_real (rettype, trunc));
9592 TREE_SIDE_EFFECTS (arg1) = 1;
9593 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9594 build_real (rettype, frac));
9597 return NULL_TREE;
9600 /* Given a location LOC, an interclass builtin function decl FNDECL
9601 and its single argument ARG, return an folded expression computing
9602 the same, or NULL_TREE if we either couldn't or didn't want to fold
9603 (the latter happen if there's an RTL instruction available). */
9605 static tree
9606 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9608 enum machine_mode mode;
9610 if (!validate_arg (arg, REAL_TYPE))
9611 return NULL_TREE;
9613 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9614 return NULL_TREE;
9616 mode = TYPE_MODE (TREE_TYPE (arg));
9618 /* If there is no optab, try generic code. */
9619 switch (DECL_FUNCTION_CODE (fndecl))
9621 tree result;
9623 CASE_FLT_FN (BUILT_IN_ISINF):
9625 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9626 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9627 tree const type = TREE_TYPE (arg);
9628 REAL_VALUE_TYPE r;
9629 char buf[128];
9631 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9632 real_from_string (&r, buf);
9633 result = build_call_expr (isgr_fn, 2,
9634 fold_build1_loc (loc, ABS_EXPR, type, arg),
9635 build_real (type, r));
9636 return result;
9638 CASE_FLT_FN (BUILT_IN_FINITE):
9639 case BUILT_IN_ISFINITE:
9641 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9642 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9643 tree const type = TREE_TYPE (arg);
9644 REAL_VALUE_TYPE r;
9645 char buf[128];
9647 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9648 real_from_string (&r, buf);
9649 result = build_call_expr (isle_fn, 2,
9650 fold_build1_loc (loc, ABS_EXPR, type, arg),
9651 build_real (type, r));
9652 /*result = fold_build2_loc (loc, UNGT_EXPR,
9653 TREE_TYPE (TREE_TYPE (fndecl)),
9654 fold_build1_loc (loc, ABS_EXPR, type, arg),
9655 build_real (type, r));
9656 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9657 TREE_TYPE (TREE_TYPE (fndecl)),
9658 result);*/
9659 return result;
9661 case BUILT_IN_ISNORMAL:
9663 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9664 islessequal(fabs(x),DBL_MAX). */
9665 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9666 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9667 tree const type = TREE_TYPE (arg);
9668 REAL_VALUE_TYPE rmax, rmin;
9669 char buf[128];
9671 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9672 real_from_string (&rmax, buf);
9673 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9674 real_from_string (&rmin, buf);
9675 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9676 result = build_call_expr (isle_fn, 2, arg,
9677 build_real (type, rmax));
9678 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9679 build_call_expr (isge_fn, 2, arg,
9680 build_real (type, rmin)));
9681 return result;
9683 default:
9684 break;
9687 return NULL_TREE;
9690 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9691 ARG is the argument for the call. */
9693 static tree
9694 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9696 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9697 REAL_VALUE_TYPE r;
9699 if (!validate_arg (arg, REAL_TYPE))
9700 return NULL_TREE;
9702 switch (builtin_index)
9704 case BUILT_IN_ISINF:
9705 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9706 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9708 if (TREE_CODE (arg) == REAL_CST)
9710 r = TREE_REAL_CST (arg);
9711 if (real_isinf (&r))
9712 return real_compare (GT_EXPR, &r, &dconst0)
9713 ? integer_one_node : integer_minus_one_node;
9714 else
9715 return integer_zero_node;
9718 return NULL_TREE;
9720 case BUILT_IN_ISINF_SIGN:
9722 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9723 /* In a boolean context, GCC will fold the inner COND_EXPR to
9724 1. So e.g. "if (isinf_sign(x))" would be folded to just
9725 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9726 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9727 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9728 tree tmp = NULL_TREE;
9730 arg = builtin_save_expr (arg);
9732 if (signbit_fn && isinf_fn)
9734 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9735 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9737 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9738 signbit_call, integer_zero_node);
9739 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9740 isinf_call, integer_zero_node);
9742 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9743 integer_minus_one_node, integer_one_node);
9744 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9745 isinf_call, tmp,
9746 integer_zero_node);
9749 return tmp;
9752 case BUILT_IN_ISFINITE:
9753 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9754 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9755 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9757 if (TREE_CODE (arg) == REAL_CST)
9759 r = TREE_REAL_CST (arg);
9760 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9763 return NULL_TREE;
9765 case BUILT_IN_ISNAN:
9766 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9767 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9769 if (TREE_CODE (arg) == REAL_CST)
9771 r = TREE_REAL_CST (arg);
9772 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9775 arg = builtin_save_expr (arg);
9776 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9778 default:
9779 gcc_unreachable ();
9783 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9784 This builtin will generate code to return the appropriate floating
9785 point classification depending on the value of the floating point
9786 number passed in. The possible return values must be supplied as
9787 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9788 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9789 one floating point argument which is "type generic". */
9791 static tree
9792 fold_builtin_fpclassify (location_t loc, tree exp)
9794 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9795 arg, type, res, tmp;
9796 enum machine_mode mode;
9797 REAL_VALUE_TYPE r;
9798 char buf[128];
9800 /* Verify the required arguments in the original call. */
9801 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9802 INTEGER_TYPE, INTEGER_TYPE,
9803 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9804 return NULL_TREE;
9806 fp_nan = CALL_EXPR_ARG (exp, 0);
9807 fp_infinite = CALL_EXPR_ARG (exp, 1);
9808 fp_normal = CALL_EXPR_ARG (exp, 2);
9809 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9810 fp_zero = CALL_EXPR_ARG (exp, 4);
9811 arg = CALL_EXPR_ARG (exp, 5);
9812 type = TREE_TYPE (arg);
9813 mode = TYPE_MODE (type);
9814 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9816 /* fpclassify(x) ->
9817 isnan(x) ? FP_NAN :
9818 (fabs(x) == Inf ? FP_INFINITE :
9819 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9820 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9822 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9823 build_real (type, dconst0));
9824 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9825 tmp, fp_zero, fp_subnormal);
9827 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9828 real_from_string (&r, buf);
9829 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9830 arg, build_real (type, r));
9831 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9833 if (HONOR_INFINITIES (mode))
9835 real_inf (&r);
9836 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9837 build_real (type, r));
9838 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9839 fp_infinite, res);
9842 if (HONOR_NANS (mode))
9844 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9845 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9848 return res;
9851 /* Fold a call to an unordered comparison function such as
9852 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9853 being called and ARG0 and ARG1 are the arguments for the call.
9854 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9855 the opposite of the desired result. UNORDERED_CODE is used
9856 for modes that can hold NaNs and ORDERED_CODE is used for
9857 the rest. */
9859 static tree
9860 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9861 enum tree_code unordered_code,
9862 enum tree_code ordered_code)
9864 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9865 enum tree_code code;
9866 tree type0, type1;
9867 enum tree_code code0, code1;
9868 tree cmp_type = NULL_TREE;
9870 type0 = TREE_TYPE (arg0);
9871 type1 = TREE_TYPE (arg1);
9873 code0 = TREE_CODE (type0);
9874 code1 = TREE_CODE (type1);
9876 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9877 /* Choose the wider of two real types. */
9878 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9879 ? type0 : type1;
9880 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9881 cmp_type = type0;
9882 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9883 cmp_type = type1;
9885 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9886 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9888 if (unordered_code == UNORDERED_EXPR)
9890 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9891 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9892 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9895 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9896 : ordered_code;
9897 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9898 fold_build2_loc (loc, code, type, arg0, arg1));
9901 /* Fold a call to built-in function FNDECL with 0 arguments.
9902 IGNORE is true if the result of the function call is ignored. This
9903 function returns NULL_TREE if no simplification was possible. */
9905 static tree
9906 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9908 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9909 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9910 switch (fcode)
9912 CASE_FLT_FN (BUILT_IN_INF):
9913 case BUILT_IN_INFD32:
9914 case BUILT_IN_INFD64:
9915 case BUILT_IN_INFD128:
9916 return fold_builtin_inf (loc, type, true);
9918 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9919 return fold_builtin_inf (loc, type, false);
9921 case BUILT_IN_CLASSIFY_TYPE:
9922 return fold_builtin_classify_type (NULL_TREE);
9924 default:
9925 break;
9927 return NULL_TREE;
9930 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9931 IGNORE is true if the result of the function call is ignored. This
9932 function returns NULL_TREE if no simplification was possible. */
9934 static tree
9935 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9937 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9938 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9939 switch (fcode)
9941 case BUILT_IN_CONSTANT_P:
9943 tree val = fold_builtin_constant_p (arg0);
9945 /* Gimplification will pull the CALL_EXPR for the builtin out of
9946 an if condition. When not optimizing, we'll not CSE it back.
9947 To avoid link error types of regressions, return false now. */
9948 if (!val && !optimize)
9949 val = integer_zero_node;
9951 return val;
9954 case BUILT_IN_CLASSIFY_TYPE:
9955 return fold_builtin_classify_type (arg0);
9957 case BUILT_IN_STRLEN:
9958 return fold_builtin_strlen (loc, type, arg0);
9960 CASE_FLT_FN (BUILT_IN_FABS):
9961 return fold_builtin_fabs (loc, arg0, type);
9963 case BUILT_IN_ABS:
9964 case BUILT_IN_LABS:
9965 case BUILT_IN_LLABS:
9966 case BUILT_IN_IMAXABS:
9967 return fold_builtin_abs (loc, arg0, type);
9969 CASE_FLT_FN (BUILT_IN_CONJ):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9972 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9973 break;
9975 CASE_FLT_FN (BUILT_IN_CREAL):
9976 if (validate_arg (arg0, COMPLEX_TYPE)
9977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9978 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9979 break;
9981 CASE_FLT_FN (BUILT_IN_CIMAG):
9982 if (validate_arg (arg0, COMPLEX_TYPE)
9983 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9984 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9985 break;
9987 CASE_FLT_FN (BUILT_IN_CCOS):
9988 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9990 CASE_FLT_FN (BUILT_IN_CCOSH):
9991 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9993 CASE_FLT_FN (BUILT_IN_CPROJ):
9994 return fold_builtin_cproj(loc, arg0, type);
9996 CASE_FLT_FN (BUILT_IN_CSIN):
9997 if (validate_arg (arg0, COMPLEX_TYPE)
9998 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9999 return do_mpc_arg1 (arg0, type, mpc_sin);
10000 break;
10002 CASE_FLT_FN (BUILT_IN_CSINH):
10003 if (validate_arg (arg0, COMPLEX_TYPE)
10004 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10005 return do_mpc_arg1 (arg0, type, mpc_sinh);
10006 break;
10008 CASE_FLT_FN (BUILT_IN_CTAN):
10009 if (validate_arg (arg0, COMPLEX_TYPE)
10010 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10011 return do_mpc_arg1 (arg0, type, mpc_tan);
10012 break;
10014 CASE_FLT_FN (BUILT_IN_CTANH):
10015 if (validate_arg (arg0, COMPLEX_TYPE)
10016 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10017 return do_mpc_arg1 (arg0, type, mpc_tanh);
10018 break;
10020 CASE_FLT_FN (BUILT_IN_CLOG):
10021 if (validate_arg (arg0, COMPLEX_TYPE)
10022 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10023 return do_mpc_arg1 (arg0, type, mpc_log);
10024 break;
10026 CASE_FLT_FN (BUILT_IN_CSQRT):
10027 if (validate_arg (arg0, COMPLEX_TYPE)
10028 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10029 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10030 break;
10032 CASE_FLT_FN (BUILT_IN_CASIN):
10033 if (validate_arg (arg0, COMPLEX_TYPE)
10034 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10035 return do_mpc_arg1 (arg0, type, mpc_asin);
10036 break;
10038 CASE_FLT_FN (BUILT_IN_CACOS):
10039 if (validate_arg (arg0, COMPLEX_TYPE)
10040 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10041 return do_mpc_arg1 (arg0, type, mpc_acos);
10042 break;
10044 CASE_FLT_FN (BUILT_IN_CATAN):
10045 if (validate_arg (arg0, COMPLEX_TYPE)
10046 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10047 return do_mpc_arg1 (arg0, type, mpc_atan);
10048 break;
10050 CASE_FLT_FN (BUILT_IN_CASINH):
10051 if (validate_arg (arg0, COMPLEX_TYPE)
10052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10053 return do_mpc_arg1 (arg0, type, mpc_asinh);
10054 break;
10056 CASE_FLT_FN (BUILT_IN_CACOSH):
10057 if (validate_arg (arg0, COMPLEX_TYPE)
10058 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10059 return do_mpc_arg1 (arg0, type, mpc_acosh);
10060 break;
10062 CASE_FLT_FN (BUILT_IN_CATANH):
10063 if (validate_arg (arg0, COMPLEX_TYPE)
10064 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10065 return do_mpc_arg1 (arg0, type, mpc_atanh);
10066 break;
10068 CASE_FLT_FN (BUILT_IN_CABS):
10069 return fold_builtin_cabs (loc, arg0, type, fndecl);
10071 CASE_FLT_FN (BUILT_IN_CARG):
10072 return fold_builtin_carg (loc, arg0, type);
10074 CASE_FLT_FN (BUILT_IN_SQRT):
10075 return fold_builtin_sqrt (loc, arg0, type);
10077 CASE_FLT_FN (BUILT_IN_CBRT):
10078 return fold_builtin_cbrt (loc, arg0, type);
10080 CASE_FLT_FN (BUILT_IN_ASIN):
10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10083 &dconstm1, &dconst1, true);
10084 break;
10086 CASE_FLT_FN (BUILT_IN_ACOS):
10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10089 &dconstm1, &dconst1, true);
10090 break;
10092 CASE_FLT_FN (BUILT_IN_ATAN):
10093 if (validate_arg (arg0, REAL_TYPE))
10094 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10095 break;
10097 CASE_FLT_FN (BUILT_IN_ASINH):
10098 if (validate_arg (arg0, REAL_TYPE))
10099 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10100 break;
10102 CASE_FLT_FN (BUILT_IN_ACOSH):
10103 if (validate_arg (arg0, REAL_TYPE))
10104 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10105 &dconst1, NULL, true);
10106 break;
10108 CASE_FLT_FN (BUILT_IN_ATANH):
10109 if (validate_arg (arg0, REAL_TYPE))
10110 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10111 &dconstm1, &dconst1, false);
10112 break;
10114 CASE_FLT_FN (BUILT_IN_SIN):
10115 if (validate_arg (arg0, REAL_TYPE))
10116 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10117 break;
10119 CASE_FLT_FN (BUILT_IN_COS):
10120 return fold_builtin_cos (loc, arg0, type, fndecl);
10122 CASE_FLT_FN (BUILT_IN_TAN):
10123 return fold_builtin_tan (arg0, type);
10125 CASE_FLT_FN (BUILT_IN_CEXP):
10126 return fold_builtin_cexp (loc, arg0, type);
10128 CASE_FLT_FN (BUILT_IN_CEXPI):
10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10131 break;
10133 CASE_FLT_FN (BUILT_IN_SINH):
10134 if (validate_arg (arg0, REAL_TYPE))
10135 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10136 break;
10138 CASE_FLT_FN (BUILT_IN_COSH):
10139 return fold_builtin_cosh (loc, arg0, type, fndecl);
10141 CASE_FLT_FN (BUILT_IN_TANH):
10142 if (validate_arg (arg0, REAL_TYPE))
10143 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10144 break;
10146 CASE_FLT_FN (BUILT_IN_ERF):
10147 if (validate_arg (arg0, REAL_TYPE))
10148 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10149 break;
10151 CASE_FLT_FN (BUILT_IN_ERFC):
10152 if (validate_arg (arg0, REAL_TYPE))
10153 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10154 break;
10156 CASE_FLT_FN (BUILT_IN_TGAMMA):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10159 break;
10161 CASE_FLT_FN (BUILT_IN_EXP):
10162 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10164 CASE_FLT_FN (BUILT_IN_EXP2):
10165 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10167 CASE_FLT_FN (BUILT_IN_EXP10):
10168 CASE_FLT_FN (BUILT_IN_POW10):
10169 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10171 CASE_FLT_FN (BUILT_IN_EXPM1):
10172 if (validate_arg (arg0, REAL_TYPE))
10173 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10174 break;
10176 CASE_FLT_FN (BUILT_IN_LOG):
10177 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10179 CASE_FLT_FN (BUILT_IN_LOG2):
10180 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10182 CASE_FLT_FN (BUILT_IN_LOG10):
10183 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10185 CASE_FLT_FN (BUILT_IN_LOG1P):
10186 if (validate_arg (arg0, REAL_TYPE))
10187 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10188 &dconstm1, NULL, false);
10189 break;
10191 CASE_FLT_FN (BUILT_IN_J0):
10192 if (validate_arg (arg0, REAL_TYPE))
10193 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10194 NULL, NULL, 0);
10195 break;
10197 CASE_FLT_FN (BUILT_IN_J1):
10198 if (validate_arg (arg0, REAL_TYPE))
10199 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10200 NULL, NULL, 0);
10201 break;
10203 CASE_FLT_FN (BUILT_IN_Y0):
10204 if (validate_arg (arg0, REAL_TYPE))
10205 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10206 &dconst0, NULL, false);
10207 break;
10209 CASE_FLT_FN (BUILT_IN_Y1):
10210 if (validate_arg (arg0, REAL_TYPE))
10211 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10212 &dconst0, NULL, false);
10213 break;
10215 CASE_FLT_FN (BUILT_IN_NAN):
10216 case BUILT_IN_NAND32:
10217 case BUILT_IN_NAND64:
10218 case BUILT_IN_NAND128:
10219 return fold_builtin_nan (arg0, type, true);
10221 CASE_FLT_FN (BUILT_IN_NANS):
10222 return fold_builtin_nan (arg0, type, false);
10224 CASE_FLT_FN (BUILT_IN_FLOOR):
10225 return fold_builtin_floor (loc, fndecl, arg0);
10227 CASE_FLT_FN (BUILT_IN_CEIL):
10228 return fold_builtin_ceil (loc, fndecl, arg0);
10230 CASE_FLT_FN (BUILT_IN_TRUNC):
10231 return fold_builtin_trunc (loc, fndecl, arg0);
10233 CASE_FLT_FN (BUILT_IN_ROUND):
10234 return fold_builtin_round (loc, fndecl, arg0);
10236 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10237 CASE_FLT_FN (BUILT_IN_RINT):
10238 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10240 CASE_FLT_FN (BUILT_IN_LCEIL):
10241 CASE_FLT_FN (BUILT_IN_LLCEIL):
10242 CASE_FLT_FN (BUILT_IN_LFLOOR):
10243 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10244 CASE_FLT_FN (BUILT_IN_LROUND):
10245 CASE_FLT_FN (BUILT_IN_LLROUND):
10246 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10248 CASE_FLT_FN (BUILT_IN_LRINT):
10249 CASE_FLT_FN (BUILT_IN_LLRINT):
10250 return fold_fixed_mathfn (loc, fndecl, arg0);
10252 case BUILT_IN_BSWAP32:
10253 case BUILT_IN_BSWAP64:
10254 return fold_builtin_bswap (fndecl, arg0);
10256 CASE_INT_FN (BUILT_IN_FFS):
10257 CASE_INT_FN (BUILT_IN_CLZ):
10258 CASE_INT_FN (BUILT_IN_CTZ):
10259 CASE_INT_FN (BUILT_IN_POPCOUNT):
10260 CASE_INT_FN (BUILT_IN_PARITY):
10261 return fold_builtin_bitop (fndecl, arg0);
10263 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10264 return fold_builtin_signbit (loc, arg0, type);
10266 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10267 return fold_builtin_significand (loc, arg0, type);
10269 CASE_FLT_FN (BUILT_IN_ILOGB):
10270 CASE_FLT_FN (BUILT_IN_LOGB):
10271 return fold_builtin_logb (loc, arg0, type);
10273 case BUILT_IN_ISASCII:
10274 return fold_builtin_isascii (loc, arg0);
10276 case BUILT_IN_TOASCII:
10277 return fold_builtin_toascii (loc, arg0);
10279 case BUILT_IN_ISDIGIT:
10280 return fold_builtin_isdigit (loc, arg0);
10282 CASE_FLT_FN (BUILT_IN_FINITE):
10283 case BUILT_IN_FINITED32:
10284 case BUILT_IN_FINITED64:
10285 case BUILT_IN_FINITED128:
10286 case BUILT_IN_ISFINITE:
10288 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10289 if (ret)
10290 return ret;
10291 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10294 CASE_FLT_FN (BUILT_IN_ISINF):
10295 case BUILT_IN_ISINFD32:
10296 case BUILT_IN_ISINFD64:
10297 case BUILT_IN_ISINFD128:
10299 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10300 if (ret)
10301 return ret;
10302 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10305 case BUILT_IN_ISNORMAL:
10306 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10308 case BUILT_IN_ISINF_SIGN:
10309 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10311 CASE_FLT_FN (BUILT_IN_ISNAN):
10312 case BUILT_IN_ISNAND32:
10313 case BUILT_IN_ISNAND64:
10314 case BUILT_IN_ISNAND128:
10315 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10317 case BUILT_IN_PRINTF:
10318 case BUILT_IN_PRINTF_UNLOCKED:
10319 case BUILT_IN_VPRINTF:
10320 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10322 case BUILT_IN_FREE:
10323 if (integer_zerop (arg0))
10324 return build_empty_stmt (loc);
10325 break;
10327 default:
10328 break;
10331 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   IGNORE is true if the result of the function call is ignored.  This
   function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    /* The do_mpfr_* helpers fold calls with constant arguments via MPFR;
       they return NULL_TREE when no compile-time evaluation is possible.  */
    CASE_FLT_FN (BUILT_IN_JN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
    break;

    CASE_FLT_FN (BUILT_IN_YN):
      if (validate_arg (arg0, INTEGER_TYPE)
	  && validate_arg (arg1, REAL_TYPE))
	return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
				 &dconst0, false);
    break;

    CASE_FLT_FN (BUILT_IN_DREM):
    CASE_FLT_FN (BUILT_IN_REMAINDER):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
    break;

    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      /* Second argument is the sign-result pointer of the _r variants.  */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
    break;

    CASE_FLT_FN (BUILT_IN_ATAN2):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
    break;

    CASE_FLT_FN (BUILT_IN_FDIM):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg(arg1, REAL_TYPE))
	return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
    break;

    CASE_FLT_FN (BUILT_IN_HYPOT):
      return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_CPOW):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
	  && validate_arg (arg1, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
	return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
    break;

    CASE_FLT_FN (BUILT_IN_LDEXP):
      return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      return fold_builtin_load_exponent (loc, arg0, arg1,
					 type, /*ldexp=*/false);

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_BZERO:
      return fold_builtin_bzero (loc, arg0, arg1, ignore);

    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (loc, arg0, arg1, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (loc, arg0, arg1);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (loc, arg0, arg1, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (loc, arg0, arg1, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);

    case BUILT_IN_STPCPY:
      /* If the result is unused, stpcpy's return-pointer semantics do not
	 matter, so degrade it to the (usually cheaper) strcpy.  */
      if (ignore)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
	  if (!fn)
	    break;

	  return build_call_expr_loc (loc, fn, 2, arg0, arg1);
	}
      else
	return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
      break;

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1);

    CASE_FLT_FN (BUILT_IN_POW):
      return fold_builtin_pow (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_POWI):
      return fold_builtin_powi (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
      return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_FMIN):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);

    CASE_FLT_FN (BUILT_IN_FMAX):
      return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);

    /* Each unordered comparison folds to the inverse of the corresponding
       unordered tree code (e.g. isgreater == !(x UNLE y)).  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);

    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      /* ARG0 is the __printf_chk flag; it may only be dropped if it is a
	 side-effect-free integer.  */
      if (!validate_arg (arg0, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg0))
	return NULL_TREE;
      else
	return fold_builtin_printf (loc, fndecl,
				    arg1, NULL_TREE, ignore, fcode);
    break;

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
				   ignore, fcode);

    default:
      break;
    }
  return NULL_TREE;
}
10536 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10537 and ARG2. IGNORE is true if the result of the function call is ignored.
10538 This function returns NULL_TREE if no simplification was possible. */
10540 static tree
10541 fold_builtin_3 (location_t loc, tree fndecl,
10542 tree arg0, tree arg1, tree arg2, bool ignore)
10544 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10545 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10546 switch (fcode)
10549 CASE_FLT_FN (BUILT_IN_SINCOS):
10550 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10552 CASE_FLT_FN (BUILT_IN_FMA):
10553 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10554 break;
10556 CASE_FLT_FN (BUILT_IN_REMQUO):
10557 if (validate_arg (arg0, REAL_TYPE)
10558 && validate_arg(arg1, REAL_TYPE)
10559 && validate_arg(arg2, POINTER_TYPE))
10560 return do_mpfr_remquo (arg0, arg1, arg2);
10561 break;
10563 case BUILT_IN_MEMSET:
10564 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10566 case BUILT_IN_BCOPY:
10567 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10568 void_type_node, true, /*endp=*/3);
10570 case BUILT_IN_MEMCPY:
10571 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10572 type, ignore, /*endp=*/0);
10574 case BUILT_IN_MEMPCPY:
10575 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10576 type, ignore, /*endp=*/1);
10578 case BUILT_IN_MEMMOVE:
10579 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10580 type, ignore, /*endp=*/3);
10582 case BUILT_IN_STRNCAT:
10583 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10585 case BUILT_IN_STRNCPY:
10586 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10588 case BUILT_IN_STRNCMP:
10589 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10591 case BUILT_IN_MEMCHR:
10592 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10594 case BUILT_IN_BCMP:
10595 case BUILT_IN_MEMCMP:
10596 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10598 case BUILT_IN_SPRINTF:
10599 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10601 case BUILT_IN_STRCPY_CHK:
10602 case BUILT_IN_STPCPY_CHK:
10603 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10604 ignore, fcode);
10606 case BUILT_IN_STRCAT_CHK:
10607 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10609 case BUILT_IN_PRINTF_CHK:
10610 case BUILT_IN_VPRINTF_CHK:
10611 if (!validate_arg (arg0, INTEGER_TYPE)
10612 || TREE_SIDE_EFFECTS (arg0))
10613 return NULL_TREE;
10614 else
10615 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10616 break;
10618 case BUILT_IN_FPRINTF:
10619 case BUILT_IN_FPRINTF_UNLOCKED:
10620 case BUILT_IN_VFPRINTF:
10621 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10622 ignore, fcode);
10624 case BUILT_IN_FPRINTF_CHK:
10625 case BUILT_IN_VFPRINTF_CHK:
10626 if (!validate_arg (arg1, INTEGER_TYPE)
10627 || TREE_SIDE_EFFECTS (arg1))
10628 return NULL_TREE;
10629 else
10630 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10631 ignore, fcode);
10633 default:
10634 break;
10636 return NULL_TREE;
/* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
   ARG2, and ARG3.  IGNORE is true if the result of the function call is
   ignored.  This function returns NULL_TREE if no simplification was
   possible.  */

static tree
fold_builtin_4 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      /* ARG3 is the compile-time object-size bound of the destination.  */
      return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
				      NULL_TREE, ignore,
				      DECL_FUNCTION_CODE (fndecl));

    case BUILT_IN_STRNCPY_CHK:
      return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);

    case BUILT_IN_STRNCAT_CHK:
      return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);

    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      /* ARG1 is the __fprintf_chk flag; it may only be dropped if it is a
	 side-effect-free integer.  */
      if (!validate_arg (arg1, INTEGER_TYPE)
	  || TREE_SIDE_EFFECTS (arg1))
	return NULL_TREE;
      else
	return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
				     ignore, fcode);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
10682 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10683 arguments, where NARGS <= 4. IGNORE is true if the result of the
10684 function call is ignored. This function returns NULL_TREE if no
10685 simplification was possible. Note that this only folds builtins with
10686 fixed argument patterns. Foldings that do varargs-to-varargs
10687 transformations, or that match calls with more than 4 arguments,
10688 need to be handled with fold_builtin_varargs instead. */
10690 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10692 static tree
10693 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10695 tree ret = NULL_TREE;
10697 switch (nargs)
10699 case 0:
10700 ret = fold_builtin_0 (loc, fndecl, ignore);
10701 break;
10702 case 1:
10703 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10704 break;
10705 case 2:
10706 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10707 break;
10708 case 3:
10709 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10710 break;
10711 case 4:
10712 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10713 ignore);
10714 break;
10715 default:
10716 break;
10718 if (ret)
10720 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10721 SET_EXPR_LOCATION (ret, loc);
10722 TREE_NO_WARNING (ret) = 1;
10723 return ret;
10725 return NULL_TREE;
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
		      bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (loc, exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
      break;

    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, exp);
      break;

    default:
      break;
    }
  if (ret)
    {
      /* Wrap in a NOP_EXPR with warnings suppressed, so removing the call
	 does not cause spurious "statement with no effect" warnings.  */
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
10771 /* Return true if FNDECL shouldn't be folded right now.
10772 If a built-in function has an inline attribute always_inline
10773 wrapper, defer folding it after always_inline functions have
10774 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10775 might not be performed. */
10777 static bool
10778 avoid_folding_inline_builtin (tree fndecl)
10780 return (DECL_DECLARED_INLINE_P (fndecl)
10781 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10782 && cfun
10783 && !cfun->always_inline_functions_inlined
10784 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are delegated to the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  /* Try fixed-arity folders first, then the varargs folders.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10841 /* Conveniently construct a function call expression. FNDECL names the
10842 function to be called and N arguments are passed in the array
10843 ARGARRAY. */
10845 tree
10846 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10848 tree fntype = TREE_TYPE (fndecl);
10849 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10851 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10854 /* Conveniently construct a function call expression. FNDECL names the
10855 function to be called and the arguments are passed in the vector
10856 VEC. */
10858 tree
10859 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10861 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10862 VEC_address (tree, vec));
10866 /* Conveniently construct a function call expression. FNDECL names the
10867 function to be called, N is the number of arguments, and the "..."
10868 parameters are the argument expressions. */
10870 tree
10871 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10873 va_list ap;
10874 tree *argarray = XALLOCAVEC (tree, n);
10875 int i;
10877 va_start (ap, n);
10878 for (i = 0; i < n; i++)
10879 argarray[i] = va_arg (ap, tree);
10880 va_end (ap);
10881 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10884 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10885 varargs macros aren't supported by all bootstrap compilers. */
10887 tree
10888 build_call_expr (tree fndecl, int n, ...)
10890 va_list ap;
10891 tree *argarray = XALLOCAVEC (tree, n);
10892 int i;
10894 va_start (ap, n);
10895 for (i = 0; i < n; i++)
10896 argarray[i] = va_arg (ap, tree);
10897 va_end (ap);
10898 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  /* Machine-dependent builtins go through the target hook;
	     if it declines, just build the plain call.  */
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      ret = targetm.fold_builtin (fndecl, n, argarray, false);
	      if (ret)
		return ret;

	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
10959 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10960 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10961 of arguments in ARGS to be omitted. OLDNARGS is the number of
10962 elements in ARGS. */
10964 static tree
10965 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10966 int skip, tree fndecl, int n, va_list newargs)
10968 int nargs = oldnargs - skip + n;
10969 tree *buffer;
10971 if (n > 0)
10973 int i, j;
10975 buffer = XALLOCAVEC (tree, nargs);
10976 for (i = 0; i < n; i++)
10977 buffer[i] = va_arg (newargs, tree);
10978 for (j = skip; j < oldnargs; j++, i++)
10979 buffer[i] = args[j];
10981 else
10982 buffer = args + skip;
10984 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10987 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10988 list ARGS along with N new arguments specified as the "..."
10989 parameters. SKIP is the number of arguments in ARGS to be omitted.
10990 OLDNARGS is the number of elements in ARGS. */
10992 static tree
10993 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10994 int skip, tree fndecl, int n, ...)
10996 va_list ap;
10997 tree t;
10999 va_start (ap, n);
11000 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11001 va_end (ap);
11003 return t;
11006 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11007 along with N new arguments specified as the "..." parameters. SKIP
11008 is the number of arguments in EXP to be omitted. This function is used
11009 to do varargs-to-varargs transformations. */
11011 static tree
11012 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11014 va_list ap;
11015 tree t;
11017 va_start (ap, n);
11018 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11019 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11020 va_end (ap);
11022 return t;
11025 /* Validate a single argument ARG against a tree code CODE representing
11026 a type. */
11028 static bool
11029 validate_arg (const_tree arg, enum tree_code code)
11031 if (!arg)
11032 return false;
11033 else if (code == POINTER_TYPE)
11034 return POINTER_TYPE_P (TREE_TYPE (arg));
11035 else if (code == INTEGER_TYPE)
11036 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11037 return code == TREE_CODE (TREE_TYPE (arg));
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* The specifiers are passed as ints (default promotion of the
	 enum), so they must be read back as int.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      /* The specifiers are passed as ints (default promotion of the
	 enum), so they must be read back as int.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
11146 /* Default target-specific builtin expander that does nothing. */
11149 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11150 rtx target ATTRIBUTE_UNUSED,
11151 rtx subtarget ATTRIBUTE_UNUSED,
11152 enum machine_mode mode ATTRIBUTE_UNUSED,
11153 int ignore ATTRIBUTE_UNUSED)
11155 return NULL_RTX;
11158 /* Returns true is EXP represents data that would potentially reside
11159 in a readonly section. */
11161 static bool
11162 readonly_data_expr (tree exp)
11164 STRIP_NOPS (exp);
11166 if (TREE_CODE (exp) != ADDR_EXPR)
11167 return false;
11169 exp = get_base_address (TREE_OPERAND (exp, 0));
11170 if (!exp)
11171 return false;
11173 /* Make sure we call decl_readonly_section only for trees it
11174 can handle (since it returns true for everything it doesn't
11175 understand). */
11176 if (TREE_CODE (exp) == STRING_CST
11177 || TREE_CODE (exp) == CONSTRUCTOR
11178 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11179 return decl_readonly_section (exp, 0);
11180 else
11181 return false;
11184 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11185 to the call, and TYPE is its return type.
11187 Return NULL_TREE if no simplification was possible, otherwise return the
11188 simplified form of the call as a tree.
11190 The simplified form may be a constant or other expression which
11191 computes the same value, but in a more efficient manner (including
11192 calls to other builtin functions).
11194 The call may contain arguments which need to be evaluated, but
11195 which are not useful to determine the result of the call. In
11196 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11197 COMPOUND_EXPR will be an argument which must be evaluated.
11198 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11199 COMPOUND_EXPR in the chain will contain the tree for the simplified
11200 form of the builtin function call. */
11202 static tree
11203 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11205 if (!validate_arg (s1, POINTER_TYPE)
11206 || !validate_arg (s2, POINTER_TYPE))
11207 return NULL_TREE;
11208 else
11210 tree fn;
11211 const char *p1, *p2;
11213 p2 = c_getstr (s2);
11214 if (p2 == NULL)
11215 return NULL_TREE;
11217 p1 = c_getstr (s1);
11218 if (p1 != NULL)
11220 const char *r = strstr (p1, p2);
11221 tree tem;
11223 if (r == NULL)
11224 return build_int_cst (TREE_TYPE (s1), 0);
11226 /* Return an offset into the constant string argument. */
11227 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11228 s1, size_int (r - p1));
11229 return fold_convert_loc (loc, type, tem);
11232 /* The argument is const char *, and the result is char *, so we need
11233 a type conversion here to avoid a warning. */
11234 if (p2[0] == '\0')
11235 return fold_convert_loc (loc, type, s1);
11237 if (p2[1] != '\0')
11238 return NULL_TREE;
11240 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11241 if (!fn)
11242 return NULL_TREE;
11244 /* New argument list transforming strstr(s1, s2) to
11245 strchr(s1, s2[0]). */
11246 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11250 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11251 the call, and TYPE is its return type.
11253 Return NULL_TREE if no simplification was possible, otherwise return the
11254 simplified form of the call as a tree.
11256 The simplified form may be a constant or other expression which
11257 computes the same value, but in a more efficient manner (including
11258 calls to other builtin functions).
11260 The call may contain arguments which need to be evaluated, but
11261 which are not useful to determine the result of the call. In
11262 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11263 COMPOUND_EXPR will be an argument which must be evaluated.
11264 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11265 COMPOUND_EXPR in the chain will contain the tree for the simplified
11266 form of the builtin function call. */
11268 static tree
11269 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11271 if (!validate_arg (s1, POINTER_TYPE)
11272 || !validate_arg (s2, INTEGER_TYPE))
11273 return NULL_TREE;
11274 else
11276 const char *p1;
      /* Only a compile-time-constant search character can be folded.  */
11278 if (TREE_CODE (s2) != INTEGER_CST)
11279 return NULL_TREE;
11281 p1 = c_getstr (s1);
11282 if (p1 != NULL)
11284 char c;
11285 const char *r;
11286 tree tem;
      /* Punt if S2 cannot be represented as a target char
	 (target_char_cast returns nonzero on failure here).  */
11288 if (target_char_cast (s2, &c))
11289 return NULL_TREE;
      /* Do the search at compile time using the host strchr.  */
11291 r = strchr (p1, c);
      /* Not found: fold to a null pointer of S1's type.  */
11293 if (r == NULL)
11294 return build_int_cst (TREE_TYPE (s1), 0);
11296 /* Return an offset into the constant string argument. */
11297 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11298 s1, size_int (r - p1));
11299 return fold_convert_loc (loc, type, tem);
11301 return NULL_TREE;
11305 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11306 the call, and TYPE is its return type.
11308 Return NULL_TREE if no simplification was possible, otherwise return the
11309 simplified form of the call as a tree.
11311 The simplified form may be a constant or other expression which
11312 computes the same value, but in a more efficient manner (including
11313 calls to other builtin functions).
11315 The call may contain arguments which need to be evaluated, but
11316 which are not useful to determine the result of the call. In
11317 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11318 COMPOUND_EXPR will be an argument which must be evaluated.
11319 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11320 COMPOUND_EXPR in the chain will contain the tree for the simplified
11321 form of the builtin function call. */
11323 static tree
11324 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11326 if (!validate_arg (s1, POINTER_TYPE)
11327 || !validate_arg (s2, INTEGER_TYPE))
11328 return NULL_TREE;
11329 else
11331 tree fn;
11332 const char *p1;
      /* Only a compile-time-constant search character can be folded.  */
11334 if (TREE_CODE (s2) != INTEGER_CST)
11335 return NULL_TREE;
11337 p1 = c_getstr (s1);
11338 if (p1 != NULL)
11340 char c;
11341 const char *r;
11342 tree tem;
      /* Punt if S2 cannot be represented as a target char.  */
11344 if (target_char_cast (s2, &c))
11345 return NULL_TREE;
      /* Do the search at compile time using the host strrchr.  */
11347 r = strrchr (p1, c);
      /* Not found: fold to a null pointer of S1's type.  */
11349 if (r == NULL)
11350 return build_int_cst (TREE_TYPE (s1), 0);
11352 /* Return an offset into the constant string argument. */
11353 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11354 s1, size_int (r - p1));
11355 return fold_convert_loc (loc, type, tem);
      /* S1 is not a constant string; the only remaining transform
	 requires the search character to be '\0'.  */
11358 if (! integer_zerop (s2))
11359 return NULL_TREE;
11361 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11362 if (!fn)
11363 return NULL_TREE;
11365 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11366 return build_call_expr_loc (loc, fn, 2, s1, s2);
11370 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11371 to the call, and TYPE is its return type.
11373 Return NULL_TREE if no simplification was possible, otherwise return the
11374 simplified form of the call as a tree.
11376 The simplified form may be a constant or other expression which
11377 computes the same value, but in a more efficient manner (including
11378 calls to other builtin functions).
11380 The call may contain arguments which need to be evaluated, but
11381 which are not useful to determine the result of the call. In
11382 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11383 COMPOUND_EXPR will be an argument which must be evaluated.
11384 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11385 COMPOUND_EXPR in the chain will contain the tree for the simplified
11386 form of the builtin function call. */
11388 static tree
11389 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11391 if (!validate_arg (s1, POINTER_TYPE)
11392 || !validate_arg (s2, POINTER_TYPE))
11393 return NULL_TREE;
11394 else
11396 tree fn;
11397 const char *p1, *p2;
      /* The accept set S2 must be a known constant string.  */
11399 p2 = c_getstr (s2);
11400 if (p2 == NULL)
11401 return NULL_TREE;
11403 p1 = c_getstr (s1);
11404 if (p1 != NULL)
      /* Both strings constant: do the search at compile time.  */
11406 const char *r = strpbrk (p1, p2);
11407 tree tem;
      /* No accept character occurs: fold to a null pointer of S1's type.  */
11409 if (r == NULL)
11410 return build_int_cst (TREE_TYPE (s1), 0);
11412 /* Return an offset into the constant string argument. */
11413 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11414 s1, size_int (r - p1));
11415 return fold_convert_loc (loc, type, tem);
11418 if (p2[0] == '\0')
11419 /* strpbrk(x, "") == NULL.
11420 Evaluate and ignore s1 in case it had side-effects. */
11421 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
      /* A multi-character accept set cannot be turned into strchr.  */
11423 if (p2[1] != '\0')
11424 return NULL_TREE; /* Really call strpbrk. */
11426 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11427 if (!fn)
11428 return NULL_TREE;
11430 /* New argument list transforming strpbrk(s1, s2) to
11431 strchr(s1, s2[0]). */
11432 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11436 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11437 to the call.
11439 Return NULL_TREE if no simplification was possible, otherwise return the
11440 simplified form of the call as a tree.
11442 The simplified form may be a constant or other expression which
11443 computes the same value, but in a more efficient manner (including
11444 calls to other builtin functions).
11446 The call may contain arguments which need to be evaluated, but
11447 which are not useful to determine the result of the call. In
11448 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11449 COMPOUND_EXPR will be an argument which must be evaluated.
11450 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11451 COMPOUND_EXPR in the chain will contain the tree for the simplified
11452 form of the builtin function call. */
11454 static tree
      /* NOTE(review): LOC is marked ATTRIBUTE_UNUSED but is in fact used
	 below in the build_call_expr_loc/fold_build2_loc calls; the
	 attribute is stale but harmless.  */
11455 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11457 if (!validate_arg (dst, POINTER_TYPE)
11458 || !validate_arg (src, POINTER_TYPE))
11459 return NULL_TREE;
11460 else
11462 const char *p = c_getstr (src);
11464 /* If the string length is zero, return the dst parameter. */
11465 if (p && *p == '\0')
11466 return dst;
      /* The strlen+strcpy expansion below can grow code, so only do it
	 when optimizing this insn for speed.  */
11468 if (optimize_insn_for_speed_p ())
11470 /* See if we can store by pieces into (dst + strlen(dst)). */
11471 tree newdst, call;
11472 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11473 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11475 if (!strlen_fn || !strcpy_fn)
11476 return NULL_TREE;
11478 /* If we don't have a movstr we don't want to emit an strcpy
11479 call. We have to do that if the length of the source string
11480 isn't computable (in that case we can use memcpy probably
11481 later expanding to a sequence of mov instructions). If we
11482 have movstr instructions we can emit strcpy calls. */
11483 if (!HAVE_movstr)
11485 tree len = c_strlen (src, 1);
11486 if (! len || TREE_SIDE_EFFECTS (len))
11487 return NULL_TREE;
11490 /* Stabilize the argument list. */
11491 dst = builtin_save_expr (dst);
11493 /* Create strlen (dst). */
11494 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11495 /* Create (dst p+ strlen (dst)). */
11497 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11498 TREE_TYPE (dst), dst, newdst);
11499 newdst = builtin_save_expr (newdst);
      /* strcat returns DST, so sequence the strcpy call with DST as the
	 overall value.  */
11501 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11502 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11504 return NULL_TREE;
11508 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11509 arguments to the call.
11511 Return NULL_TREE if no simplification was possible, otherwise return the
11512 simplified form of the call as a tree.
11514 The simplified form may be a constant or other expression which
11515 computes the same value, but in a more efficient manner (including
11516 calls to other builtin functions).
11518 The call may contain arguments which need to be evaluated, but
11519 which are not useful to determine the result of the call. In
11520 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11521 COMPOUND_EXPR will be an argument which must be evaluated.
11522 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11523 COMPOUND_EXPR in the chain will contain the tree for the simplified
11524 form of the builtin function call. */
11526 static tree
11527 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11529 if (!validate_arg (dst, POINTER_TYPE)
11530 || !validate_arg (src, POINTER_TYPE)
11531 || !validate_arg (len, INTEGER_TYPE))
11532 return NULL_TREE;
11533 else
11535 const char *p = c_getstr (src);
11537 /* If the requested length is zero, or the src parameter string
11538 length is zero, return the dst parameter. */
11539 if (integer_zerop (len) || (p && *p == '\0'))
11540 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11542 /* If the requested len is greater than or equal to the string
11543 length, call strcat. */
      /* Host strlen on P assumes c_getstr returned a NUL-terminated
	 constant string — true for string literals; confirm for other
	 constants.  */
11544 if (TREE_CODE (len) == INTEGER_CST && p
11545 && compare_tree_int (len, strlen (p)) >= 0)
11547 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11549 /* If the replacement _DECL isn't initialized, don't do the
11550 transformation. */
11551 if (!fn)
11552 return NULL_TREE;
11554 return build_call_expr_loc (loc, fn, 2, dst, src);
11556 return NULL_TREE;
11560 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11561 to the call.
11563 Return NULL_TREE if no simplification was possible, otherwise return the
11564 simplified form of the call as a tree.
11566 The simplified form may be a constant or other expression which
11567 computes the same value, but in a more efficient manner (including
11568 calls to other builtin functions).
11570 The call may contain arguments which need to be evaluated, but
11571 which are not useful to determine the result of the call. In
11572 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11573 COMPOUND_EXPR will be an argument which must be evaluated.
11574 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11575 COMPOUND_EXPR in the chain will contain the tree for the simplified
11576 form of the builtin function call. */
11578 static tree
11579 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11581 if (!validate_arg (s1, POINTER_TYPE)
11582 || !validate_arg (s2, POINTER_TYPE))
11583 return NULL_TREE;
11584 else
11586 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11588 /* If both arguments are constants, evaluate at compile-time. */
11589 if (p1 && p2)
      /* Use the host strspn; the result is a size_t constant.  */
11591 const size_t r = strspn (p1, p2);
11592 return size_int (r);
11595 /* If either argument is "", return NULL_TREE. */
      /* (The comment above appears stale: an empty string folds to zero
	 below rather than returning NULL_TREE.)  */
11596 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11597 /* Evaluate and ignore both arguments in case either one has
11598 side-effects. */
11599 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11600 s1, s2);
11601 return NULL_TREE;
11605 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11606 to the call.
11608 Return NULL_TREE if no simplification was possible, otherwise return the
11609 simplified form of the call as a tree.
11611 The simplified form may be a constant or other expression which
11612 computes the same value, but in a more efficient manner (including
11613 calls to other builtin functions).
11615 The call may contain arguments which need to be evaluated, but
11616 which are not useful to determine the result of the call. In
11617 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11618 COMPOUND_EXPR will be an argument which must be evaluated.
11619 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11620 COMPOUND_EXPR in the chain will contain the tree for the simplified
11621 form of the builtin function call. */
11623 static tree
11624 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11626 if (!validate_arg (s1, POINTER_TYPE)
11627 || !validate_arg (s2, POINTER_TYPE))
11628 return NULL_TREE;
11629 else
11631 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11633 /* If both arguments are constants, evaluate at compile-time. */
11634 if (p1 && p2)
      /* Use the host strcspn; the result is a size_t constant.  */
11636 const size_t r = strcspn (p1, p2);
11637 return size_int (r);
11640 /* If the first argument is "", return NULL_TREE. */
      /* (The comment above appears stale: strcspn("", s2) folds to zero
	 below rather than returning NULL_TREE.)  */
11641 if (p1 && *p1 == '\0')
11643 /* Evaluate and ignore argument s2 in case it has
11644 side-effects. */
11645 return omit_one_operand_loc (loc, size_type_node,
11646 size_zero_node, s2);
11649 /* If the second argument is "", return __builtin_strlen(s1). */
11650 if (p2 && *p2 == '\0')
11652 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11654 /* If the replacement _DECL isn't initialized, don't do the
11655 transformation. */
11656 if (!fn)
11657 return NULL_TREE;
11659 return build_call_expr_loc (loc, fn, 1, s1);
11661 return NULL_TREE;
11665 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11666 to the call. IGNORE is true if the value returned
11667 by the builtin will be ignored. UNLOCKED is true if this is
11668 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11669 the known length of the string. Return NULL_TREE if no simplification
11670 was possible. */
11672 tree
11673 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11674 bool ignore, bool unlocked, tree len)
11676 /* If we're using an unlocked function, assume the other unlocked
11677 functions exist explicitly. */
11678 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11679 : implicit_built_in_decls[BUILT_IN_FPUTC]
11680 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11681 : implicit_built_in_decls[BUILT_IN_FWRITE];
11683 /* If the return value is used, don't do the transformation. */
11684 if (!ignore)
11685 return NULL_TREE;
11687 /* Verify the arguments in the original call. */
11688 if (!validate_arg (arg0, POINTER_TYPE)
11689 || !validate_arg (arg1, POINTER_TYPE))
11690 return NULL_TREE;
      /* Compute the string length ourselves if the caller didn't.  */
11692 if (! len)
11693 len = c_strlen (arg0, 0);
11695 /* Get the length of the string passed to fputs. If the length
11696 can't be determined, punt. */
11697 if (!len
11698 || TREE_CODE (len) != INTEGER_CST)
11699 return NULL_TREE;
11701 switch (compare_tree_int (len, 1))
11703 case -1: /* length is 0, delete the call entirely . */
      /* NOTE(review): stray second semicolon at the end of the next
	 statement; harmless.  */
11704 return omit_one_operand_loc (loc, integer_type_node,
11705 integer_zero_node, arg1);;
11707 case 0: /* length is 1, call fputc. */
11709 const char *p = c_getstr (arg0);
      /* Only fold to fputc when the single character is known.  */
11711 if (p != NULL)
11713 if (fn_fputc)
11714 return build_call_expr_loc (loc, fn_fputc, 2,
11715 build_int_cst (NULL_TREE, p[0]), arg1);
11716 else
11717 return NULL_TREE;
11720 /* FALLTHROUGH */
11721 case 1: /* length is greater than 1, call fwrite. */
11723 /* If optimizing for size keep fputs. */
11724 if (optimize_function_for_size_p (cfun))
11725 return NULL_TREE;
11726 /* New argument list transforming fputs(string, stream) to
11727 fwrite(string, 1, len, stream). */
11728 if (fn_fwrite)
11729 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11730 size_one_node, len, arg1);
11731 else
11732 return NULL_TREE;
11734 default:
11735 gcc_unreachable ();
11737 return NULL_TREE;
11740 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11741 produced. False otherwise. This is done so that we don't output the error
11742 or warning twice or three times. */
11744 bool
11745 fold_builtin_next_arg (tree exp, bool va_start_p)
11747 tree fntype = TREE_TYPE (current_function_decl);
11748 int nargs = call_expr_nargs (exp);
11749 tree arg;
      /* va_start/next_arg are only meaningful in variadic functions.  */
11751 if (!stdarg_p (fntype))
11753 error ("%<va_start%> used in function with fixed args");
11754 return true;
11757 if (va_start_p)
      /* NOTE(review): the inner va_start_p test is redundant inside this
	 branch; harmless.  */
11759 if (va_start_p && (nargs != 2))
11761 error ("wrong number of arguments to function %<va_start%>");
11762 return true;
11764 arg = CALL_EXPR_ARG (exp, 1);
11766 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11767 when we checked the arguments and if needed issued a warning. */
11768 else
11770 if (nargs == 0)
11772 /* Evidently an out of date version of <stdarg.h>; can't validate
11773 va_start's second argument, but can still work as intended. */
11774 warning (0, "%<__builtin_next_arg%> called without an argument");
11775 return true;
11777 else if (nargs > 1)
11779 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11780 return true;
11782 arg = CALL_EXPR_ARG (exp, 0);
      /* Look through an SSA name to the underlying PARM_DECL so the
	 last-parameter comparison below works in SSA form.  */
11785 if (TREE_CODE (arg) == SSA_NAME)
11786 arg = SSA_NAME_VAR (arg);
11788 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11789 or __builtin_next_arg (0) the first time we see it, after checking
11790 the arguments and if needed issuing a warning. */
11791 if (!integer_zerop (arg))
11793 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11795 /* Strip off all nops for the sake of the comparison. This
11796 is not quite the same as STRIP_NOPS. It does more.
11797 We must also strip off INDIRECT_EXPR for C++ reference
11798 parameters. */
11799 while (CONVERT_EXPR_P (arg)
11800 || TREE_CODE (arg) == INDIRECT_REF)
11801 arg = TREE_OPERAND (arg, 0);
11802 if (arg != last_parm)
11804 /* FIXME: Sometimes with the tree optimizers we can get the
11805 not the last argument even though the user used the last
11806 argument. We just warn and set the arg to be the last
11807 argument so that we will get wrong-code because of
11808 it. */
11809 warning (0, "second parameter of %<va_start%> not last named argument");
      /* NOTE(review): the closing delimiter of the following C99 quote
	 comment (original line 11817) is missing from this listing.  */
11812 /* Undefined by C99 7.15.1.4p4 (va_start):
11813 "If the parameter parmN is declared with the register storage
11814 class, with a function or array type, or with a type that is
11815 not compatible with the type that results after application of
11816 the default argument promotions, the behavior is undefined."
11818 else if (DECL_REGISTER (arg))
11819 warning (0, "undefined behaviour when second parameter of "
11820 "%<va_start%> is declared with %<register%> storage");
11822 /* We want to verify the second parameter just once before the tree
11823 optimizers are run and then avoid keeping it in the tree,
11824 as otherwise we could warn even for correct code like:
11825 void foo (int i, ...)
11826 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11827 if (va_start_p)
11828 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11829 else
11830 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11832 return false;
11836 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11837 ORIG may be null if this is a 2-argument call. We don't attempt to
11838 simplify calls with more than 3 arguments.
11840 Return NULL_TREE if no simplification was possible, otherwise return the
11841 simplified form of the call as a tree. If IGNORED is true, it means that
11842 the caller does not use the returned value of the function. */
11844 static tree
11845 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11846 tree orig, int ignored)
11848 tree call, retval;
11849 const char *fmt_str = NULL;
11851 /* Verify the required arguments in the original call. We deal with two
11852 types of sprintf() calls: 'sprintf (str, fmt)' and
11853 'sprintf (dest, "%s", orig)'. */
11854 if (!validate_arg (dest, POINTER_TYPE)
11855 || !validate_arg (fmt, POINTER_TYPE))
11856 return NULL_TREE;
11857 if (orig && !validate_arg (orig, POINTER_TYPE))
11858 return NULL_TREE;
11860 /* Check whether the format is a literal string constant. */
11861 fmt_str = c_getstr (fmt);
11862 if (fmt_str == NULL)
11863 return NULL_TREE;
11865 call = NULL_TREE;
11866 retval = NULL_TREE;
      /* target_percent etc. must be initialized before scanning FMT_STR.  */
11868 if (!init_target_chars ())
11869 return NULL_TREE;
11871 /* If the format doesn't contain % args or %%, use strcpy. */
11872 if (strchr (fmt_str, target_percent) == NULL)
11874 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11876 if (!fn)
11877 return NULL_TREE;
11879 /* Don't optimize sprintf (buf, "abc", ptr++). */
11880 if (orig)
11881 return NULL_TREE;
11883 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11884 'format' is known to contain no % formats. */
11885 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
      /* sprintf returns the number of characters written, i.e. the
	 format's length here.  */
11886 if (!ignored)
11887 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11890 /* If the format is "%s", use strcpy if the result isn't used. */
11891 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11893 tree fn;
11894 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11896 if (!fn)
11897 return NULL_TREE;
11899 /* Don't crash on sprintf (str1, "%s"). */
11900 if (!orig)
11901 return NULL_TREE;
11903 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
      /* When the result is used we must know ORIG's length as a constant
	 to supply the return value.  */
11904 if (!ignored)
11906 retval = c_strlen (orig, 1);
11907 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11908 return NULL_TREE;
11910 call = build_call_expr_loc (loc, fn, 2, dest, orig);
      /* Sequence the strcpy with the computed return value, converted to
	 sprintf's declared return type.  */
11913 if (call && retval)
11915 retval = fold_convert_loc
11916 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11917 retval);
11918 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11920 else
11921 return call;
11924 /* Expand a call EXP to __builtin_object_size. */
      /* NOTE(review): the return-type line of this definition (original
	 line 11926, presumably "static rtx") is missing from this
	 listing.  */
11927 expand_builtin_object_size (tree exp)
11929 tree ost;
11930 int object_size_type;
11931 tree fndecl = get_callee_fndecl (exp);
11933 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11935 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11936 exp, fndecl);
11937 expand_builtin_trap ();
11938 return const0_rtx;
11941 ost = CALL_EXPR_ARG (exp, 1);
11942 STRIP_NOPS (ost);
      /* The object-size type argument must be a constant in [0, 3].  */
11944 if (TREE_CODE (ost) != INTEGER_CST
11945 || tree_int_cst_sgn (ost) < 0
11946 || compare_tree_int (ost, 3) > 0)
11948 error ("%Klast argument of %D is not integer constant between 0 and 3",
11949 exp, fndecl);
11950 expand_builtin_trap ();
11951 return const0_rtx;
11954 object_size_type = tree_low_cst (ost, 0);
      /* Unknown size: types 0/1 report (size_t)-1, types 2/3 report 0.  */
11956 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11959 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11960 FCODE is the BUILT_IN_* to use.
11961 Return NULL_RTX if we failed; the caller should emit a normal call,
11962 otherwise try to get the result in TARGET, if convenient (and in
11963 mode MODE if that's convenient). */
11965 static rtx
11966 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11967 enum built_in_function fcode)
11969 tree dest, src, len, size;
      /* memset_chk takes an int fill value where the others take a
	 pointer.  */
11971 if (!validate_arglist (exp,
11972 POINTER_TYPE,
11973 fcode == BUILT_IN_MEMSET_CHK
11974 ? INTEGER_TYPE : POINTER_TYPE,
11975 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11976 return NULL_RTX;
11978 dest = CALL_EXPR_ARG (exp, 0);
11979 src = CALL_EXPR_ARG (exp, 1);
11980 len = CALL_EXPR_ARG (exp, 2);
11981 size = CALL_EXPR_ARG (exp, 3);
      /* SIZE must be a known constant for any of the transforms below.  */
11983 if (! host_integerp (size, 1))
11984 return NULL_RTX;
      /* SIZE of all-ones means "object size unknown"; checking is then
	 a no-op and the plain function can be used.  */
11986 if (host_integerp (len, 1) || integer_all_onesp (size))
11988 tree fn;
11990 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11992 warning_at (tree_nonartificial_location (exp),
11993 0, "%Kcall to %D will always overflow destination buffer",
11994 exp, get_callee_fndecl (exp));
11995 return NULL_RTX;
11998 fn = NULL_TREE;
11999 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12000 mem{cpy,pcpy,move,set} is available. */
12001 switch (fcode)
12003 case BUILT_IN_MEMCPY_CHK:
12004 fn = built_in_decls[BUILT_IN_MEMCPY];
12005 break;
12006 case BUILT_IN_MEMPCPY_CHK:
12007 fn = built_in_decls[BUILT_IN_MEMPCPY];
12008 break;
12009 case BUILT_IN_MEMMOVE_CHK:
12010 fn = built_in_decls[BUILT_IN_MEMMOVE];
12011 break;
12012 case BUILT_IN_MEMSET_CHK:
12013 fn = built_in_decls[BUILT_IN_MEMSET];
12014 break;
12015 default:
12016 break;
12019 if (! fn)
12020 return NULL_RTX;
      /* Expand the unchecked call, preserving the tail-call flag.  */
12022 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12023 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12024 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12025 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12027 else if (fcode == BUILT_IN_MEMSET_CHK)
12028 return NULL_RTX;
12029 else
12031 unsigned int dest_align
12032 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
12034 /* If DEST is not a pointer type, call the normal function. */
12035 if (dest_align == 0)
12036 return NULL_RTX;
12038 /* If SRC and DEST are the same (and not volatile), do nothing. */
12039 if (operand_equal_p (src, dest, 0))
12041 tree expr;
12043 if (fcode != BUILT_IN_MEMPCPY_CHK)
12045 /* Evaluate and ignore LEN in case it has side-effects. */
12046 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12047 return expand_expr (dest, target, mode, EXPAND_NORMAL);
      /* mempcpy returns DEST + LEN rather than DEST.  */
12050 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
12051 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12054 /* __memmove_chk special case. */
12055 if (fcode == BUILT_IN_MEMMOVE_CHK)
12057 unsigned int src_align
12058 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
12060 if (src_align == 0)
12061 return NULL_RTX;
12063 /* If src is categorized for a readonly section we can use
12064 normal __memcpy_chk. */
12065 if (readonly_data_expr (src))
12067 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12068 if (!fn)
12069 return NULL_RTX;
12070 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12071 dest, src, len, size);
12072 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12073 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12074 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12077 return NULL_RTX;
12081 /* Emit warning if a buffer overflow is detected at compile time. */
12083 static void
12084 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
      /* IS_STRLEN means LEN is a string whose length must be computed,
	 rather than an explicit byte count.  */
12086 int is_strlen = 0;
12087 tree len, size;
12088 location_t loc = tree_nonartificial_location (exp);
      /* Pick out the length and object-size operands of each _chk
	 variant; their argument positions differ per builtin.  */
12090 switch (fcode)
12092 case BUILT_IN_STRCPY_CHK:
12093 case BUILT_IN_STPCPY_CHK:
12094 /* For __strcat_chk the warning will be emitted only if overflowing
12095 by at least strlen (dest) + 1 bytes. */
12096 case BUILT_IN_STRCAT_CHK:
12097 len = CALL_EXPR_ARG (exp, 1);
12098 size = CALL_EXPR_ARG (exp, 2);
12099 is_strlen = 1;
12100 break;
12101 case BUILT_IN_STRNCAT_CHK:
12102 case BUILT_IN_STRNCPY_CHK:
12103 len = CALL_EXPR_ARG (exp, 2);
12104 size = CALL_EXPR_ARG (exp, 3);
12105 break;
12106 case BUILT_IN_SNPRINTF_CHK:
12107 case BUILT_IN_VSNPRINTF_CHK:
12108 len = CALL_EXPR_ARG (exp, 1);
12109 size = CALL_EXPR_ARG (exp, 3);
12110 break;
12111 default:
12112 gcc_unreachable ();
12115 if (!len || !size)
12116 return;
      /* All-ones SIZE means the object size is unknown; nothing to
	 check.  */
12118 if (! host_integerp (size, 1) || integer_all_onesp (size))
12119 return;
12121 if (is_strlen)
12123 len = c_strlen (len, 1);
12124 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12125 return;
12127 else if (fcode == BUILT_IN_STRNCAT_CHK)
12129 tree src = CALL_EXPR_ARG (exp, 1);
12130 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
12131 return;
12132 src = c_strlen (src, 1);
      /* Source length unknown: the overflow is only possible, so use the
	 weaker "might overflow" wording.  */
12133 if (! src || ! host_integerp (src, 1))
12135 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12136 exp, get_callee_fndecl (exp));
12137 return;
12139 else if (tree_int_cst_lt (src, size))
12140 return;
12142 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
12143 return;
12145 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12146 exp, get_callee_fndecl (exp));
12149 /* Emit warning if a buffer overflow is detected at compile time
12150 in __sprintf_chk/__vsprintf_chk calls. */
12152 static void
12153 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12155 tree size, len, fmt;
12156 const char *fmt_str;
12157 int nargs = call_expr_nargs (exp);
12159 /* Verify the required arguments in the original call. */
12161 if (nargs < 4)
12162 return;
12163 size = CALL_EXPR_ARG (exp, 2);
12164 fmt = CALL_EXPR_ARG (exp, 3);
      /* All-ones SIZE means the object size is unknown; nothing to
	 check.  */
12166 if (! host_integerp (size, 1) || integer_all_onesp (size))
12167 return;
12169 /* Check whether the format is a literal string constant. */
12170 fmt_str = c_getstr (fmt);
12171 if (fmt_str == NULL)
12172 return;
12174 if (!init_target_chars ())
12175 return;
12177 /* If the format doesn't contain % args or %%, we know its size. */
12178 if (strchr (fmt_str, target_percent) == 0)
12179 len = build_int_cstu (size_type_node, strlen (fmt_str))
12180 /* If the format is "%s" and first ... argument is a string literal,
12181 we know it too. */
12182 else if (fcode == BUILT_IN_SPRINTF_CHK
12183 && strcmp (fmt_str, target_percent_s) == 0)
12185 tree arg;
12187 if (nargs < 5)
12188 return;
12189 arg = CALL_EXPR_ARG (exp, 4);
12190 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12191 return;
12193 len = c_strlen (arg, 1);
12194 if (!len || ! host_integerp (len, 1))
12195 return;
12197 else
12198 return;
      /* The output needs LEN + 1 bytes (for the NUL), so LEN >= SIZE is a
	 guaranteed overflow.  */
12200 if (! tree_int_cst_lt (len, size))
12201 warning_at (tree_nonartificial_location (exp),
12202 0, "%Kcall to %D will always overflow destination buffer",
12203 exp, get_callee_fndecl (exp));
12206 /* Emit warning if a free is called with address of a variable. */
12208 static void
12209 maybe_emit_free_warning (tree exp)
12211 tree arg = CALL_EXPR_ARG (exp, 0);
      /* Only warn for an explicit address-of expression.  */
12213 STRIP_NOPS (arg);
12214 if (TREE_CODE (arg) != ADDR_EXPR)
12215 return;
      /* An address arrived at through a dereference may still point into
	 the heap, so don't warn for those.  */
12217 arg = get_base_address (TREE_OPERAND (arg, 0));
12218 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12219 return;
      /* Name the variable in the diagnostic when there is one.  */
12221 if (SSA_VAR_P (arg))
12222 warning_at (tree_nonartificial_location (exp),
12223 0, "%Kattempt to free a non-heap object %qD", exp, arg);
12224 else
12225 warning_at (tree_nonartificial_location (exp),
12226 0, "%Kattempt to free a non-heap object", exp);
12229 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12230 if possible. */
12232 tree
12233 fold_builtin_object_size (tree ptr, tree ost)
12235 unsigned HOST_WIDE_INT bytes;
12236 int object_size_type;
12238 if (!validate_arg (ptr, POINTER_TYPE)
12239 || !validate_arg (ost, INTEGER_TYPE))
12240 return NULL_TREE;
12242 STRIP_NOPS (ost);
      /* The object-size type argument must be a constant in [0, 3].  */
12244 if (TREE_CODE (ost) != INTEGER_CST
12245 || tree_int_cst_sgn (ost) < 0
12246 || compare_tree_int (ost, 3) > 0)
12247 return NULL_TREE;
12249 object_size_type = tree_low_cst (ost, 0);
12251 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12252 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12253 and (size_t) 0 for types 2 and 3. */
12254 if (TREE_SIDE_EFFECTS (ptr))
12255 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12257 if (TREE_CODE (ptr) == ADDR_EXPR)
      /* Guard against a size that does not fit size_type_node before
	 emitting the constant.  */
12259 bytes = compute_builtin_object_size (ptr, object_size_type);
12260 if (double_int_fits_to_tree_p (size_type_node,
12261 uhwi_to_double_int (bytes)))
12262 return build_int_cstu (size_type_node, bytes);
12264 else if (TREE_CODE (ptr) == SSA_NAME)
12266 /* If object size is not known yet, delay folding until
12267 later. Maybe subsequent passes will help determining
12268 it. */
12269 bytes = compute_builtin_object_size (ptr, object_size_type);
12270 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12271 && double_int_fits_to_tree_p (size_type_node,
12272 uhwi_to_double_int (bytes)))
12273 return build_int_cstu (size_type_node, bytes);
12276 return NULL_TREE;
12279 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12280 DEST, SRC, LEN, and SIZE are the arguments to the call.
12281 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
12282 code of the builtin. If MAXLEN is not NULL, it is maximum length
12283 passed as third argument. */
12285 tree
12286 fold_builtin_memory_chk (location_t loc, tree fndecl,
12287 tree dest, tree src, tree len, tree size,
12288 tree maxlen, bool ignore,
12289 enum built_in_function fcode)
12291 tree fn;
      /* memset_chk takes an int fill value where the others take a
	 pointer.  */
12293 if (!validate_arg (dest, POINTER_TYPE)
12294 || !validate_arg (src,
12295 (fcode == BUILT_IN_MEMSET_CHK
12296 ? INTEGER_TYPE : POINTER_TYPE))
12297 || !validate_arg (len, INTEGER_TYPE)
12298 || !validate_arg (size, INTEGER_TYPE))
12299 return NULL_TREE;
12301 /* If SRC and DEST are the same (and not volatile), return DEST
12302 (resp. DEST+LEN for __mempcpy_chk). */
12303 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12305 if (fcode != BUILT_IN_MEMPCPY_CHK)
12306 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12307 dest, len);
12308 else
12310 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
12311 dest, len);
12312 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
      /* SIZE must be a known constant to reason about overflow.  */
12316 if (! host_integerp (size, 1))
12317 return NULL_TREE;
      /* All-ones SIZE means the object size is unknown; the check is a
	 no-op and the unchecked function can be substituted directly.  */
12319 if (! integer_all_onesp (size))
12321 if (! host_integerp (len, 1))
12323 /* If LEN is not constant, try MAXLEN too.
12324 For MAXLEN only allow optimizing into non-_ocs function
12325 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12326 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12328 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12330 /* (void) __mempcpy_chk () can be optimized into
12331 (void) __memcpy_chk (). */
12332 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12333 if (!fn)
12334 return NULL_TREE;
12336 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12338 return NULL_TREE;
12341 else
12342 maxlen = len;
      /* Possible overflow: keep the checking variant.  */
12344 if (tree_int_cst_lt (size, maxlen))
12345 return NULL_TREE;
12348 fn = NULL_TREE;
12349 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12350 mem{cpy,pcpy,move,set} is available. */
12351 switch (fcode)
12353 case BUILT_IN_MEMCPY_CHK:
12354 fn = built_in_decls[BUILT_IN_MEMCPY];
12355 break;
12356 case BUILT_IN_MEMPCPY_CHK:
12357 fn = built_in_decls[BUILT_IN_MEMPCPY];
12358 break;
12359 case BUILT_IN_MEMMOVE_CHK:
12360 fn = built_in_decls[BUILT_IN_MEMMOVE];
12361 break;
12362 case BUILT_IN_MEMSET_CHK:
12363 fn = built_in_decls[BUILT_IN_MEMSET];
12364 break;
12365 default:
12366 break;
12369 if (!fn)
12370 return NULL_TREE;
12372 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12375 /* Fold a call to the __st[rp]cpy_chk builtin.
12376 DEST, SRC, and SIZE are the arguments to the call.
12377 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12378 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12379 strings passed as second argument. */
12381 tree
12382 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12383 tree src, tree size,
12384 tree maxlen, bool ignore,
12385 enum built_in_function fcode)
12387 tree len, fn;
12389 if (!validate_arg (dest, POINTER_TYPE)
12390 || !validate_arg (src, POINTER_TYPE)
12391 || !validate_arg (size, INTEGER_TYPE))
12392 return NULL_TREE;
12394 /* If SRC and DEST are the same (and not volatile), return DEST. */
12395 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12396 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12398 if (! host_integerp (size, 1))
12399 return NULL_TREE;
12401 if (! integer_all_onesp (size))
12403 len = c_strlen (src, 1);
12404 if (! len || ! host_integerp (len, 1))
12406 /* If LEN is not constant, try MAXLEN too.
12407 For MAXLEN only allow optimizing into non-_ocs function
12408 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12409 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12411 if (fcode == BUILT_IN_STPCPY_CHK)
12413 if (! ignore)
12414 return NULL_TREE;
12416 /* If return value of __stpcpy_chk is ignored,
12417 optimize into __strcpy_chk. */
12418 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12419 if (!fn)
12420 return NULL_TREE;
12422 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12425 if (! len || TREE_SIDE_EFFECTS (len))
12426 return NULL_TREE;
12428 /* If c_strlen returned something, but not a constant,
12429 transform __strcpy_chk into __memcpy_chk. */
12430 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12431 if (!fn)
12432 return NULL_TREE;
12434 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12435 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12436 build_call_expr_loc (loc, fn, 4,
12437 dest, src, len, size));
12440 else
12441 maxlen = len;
12443 if (! tree_int_cst_lt (maxlen, size))
12444 return NULL_TREE;
12447 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12448 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12449 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12450 if (!fn)
12451 return NULL_TREE;
12453 return build_call_expr_loc (loc, fn, 2, dest, src);
12456 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12457 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12458 length passed as third argument. */
12460 tree
12461 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12462 tree len, tree size, tree maxlen)
12464 tree fn;
12466 if (!validate_arg (dest, POINTER_TYPE)
12467 || !validate_arg (src, POINTER_TYPE)
12468 || !validate_arg (len, INTEGER_TYPE)
12469 || !validate_arg (size, INTEGER_TYPE))
12470 return NULL_TREE;
12472 if (! host_integerp (size, 1))
12473 return NULL_TREE;
12475 if (! integer_all_onesp (size))
12477 if (! host_integerp (len, 1))
12479 /* If LEN is not constant, try MAXLEN too.
12480 For MAXLEN only allow optimizing into non-_ocs function
12481 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12482 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12483 return NULL_TREE;
12485 else
12486 maxlen = len;
12488 if (tree_int_cst_lt (size, maxlen))
12489 return NULL_TREE;
12492 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12493 fn = built_in_decls[BUILT_IN_STRNCPY];
12494 if (!fn)
12495 return NULL_TREE;
12497 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12500 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12501 are the arguments to the call. */
12503 static tree
12504 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12505 tree src, tree size)
12507 tree fn;
12508 const char *p;
12510 if (!validate_arg (dest, POINTER_TYPE)
12511 || !validate_arg (src, POINTER_TYPE)
12512 || !validate_arg (size, INTEGER_TYPE))
12513 return NULL_TREE;
12515 p = c_getstr (src);
12516 /* If the SRC parameter is "", return DEST. */
12517 if (p && *p == '\0')
12518 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12520 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12521 return NULL_TREE;
12523 /* If __builtin_strcat_chk is used, assume strcat is available. */
12524 fn = built_in_decls[BUILT_IN_STRCAT];
12525 if (!fn)
12526 return NULL_TREE;
12528 return build_call_expr_loc (loc, fn, 2, dest, src);
12531 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12532 LEN, and SIZE. */
12534 static tree
12535 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12536 tree dest, tree src, tree len, tree size)
12538 tree fn;
12539 const char *p;
12541 if (!validate_arg (dest, POINTER_TYPE)
12542 || !validate_arg (src, POINTER_TYPE)
12543 || !validate_arg (size, INTEGER_TYPE)
12544 || !validate_arg (size, INTEGER_TYPE))
12545 return NULL_TREE;
12547 p = c_getstr (src);
12548 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12549 if (p && *p == '\0')
12550 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12551 else if (integer_zerop (len))
12552 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12554 if (! host_integerp (size, 1))
12555 return NULL_TREE;
12557 if (! integer_all_onesp (size))
12559 tree src_len = c_strlen (src, 1);
12560 if (src_len
12561 && host_integerp (src_len, 1)
12562 && host_integerp (len, 1)
12563 && ! tree_int_cst_lt (len, src_len))
12565 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12566 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12567 if (!fn)
12568 return NULL_TREE;
12570 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12572 return NULL_TREE;
12575 /* If __builtin_strncat_chk is used, assume strncat is available. */
12576 fn = built_in_decls[BUILT_IN_STRNCAT];
12577 if (!fn)
12578 return NULL_TREE;
12580 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12583 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12584 Return NULL_TREE if a normal call should be emitted rather than
12585 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12586 or BUILT_IN_VSPRINTF_CHK. */
12588 static tree
12589 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12590 enum built_in_function fcode)
12592 tree dest, size, len, fn, fmt, flag;
12593 const char *fmt_str;
12595 /* Verify the required arguments in the original call. */
12596 if (nargs < 4)
12597 return NULL_TREE;
12598 dest = args[0];
12599 if (!validate_arg (dest, POINTER_TYPE))
12600 return NULL_TREE;
12601 flag = args[1];
12602 if (!validate_arg (flag, INTEGER_TYPE))
12603 return NULL_TREE;
12604 size = args[2];
12605 if (!validate_arg (size, INTEGER_TYPE))
12606 return NULL_TREE;
12607 fmt = args[3];
12608 if (!validate_arg (fmt, POINTER_TYPE))
12609 return NULL_TREE;
12611 if (! host_integerp (size, 1))
12612 return NULL_TREE;
12614 len = NULL_TREE;
12616 if (!init_target_chars ())
12617 return NULL_TREE;
12619 /* Check whether the format is a literal string constant. */
12620 fmt_str = c_getstr (fmt);
12621 if (fmt_str != NULL)
12623 /* If the format doesn't contain % args or %%, we know the size. */
12624 if (strchr (fmt_str, target_percent) == 0)
12626 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12627 len = build_int_cstu (size_type_node, strlen (fmt_str));
12629 /* If the format is "%s" and first ... argument is a string literal,
12630 we know the size too. */
12631 else if (fcode == BUILT_IN_SPRINTF_CHK
12632 && strcmp (fmt_str, target_percent_s) == 0)
12634 tree arg;
12636 if (nargs == 5)
12638 arg = args[4];
12639 if (validate_arg (arg, POINTER_TYPE))
12641 len = c_strlen (arg, 1);
12642 if (! len || ! host_integerp (len, 1))
12643 len = NULL_TREE;
12649 if (! integer_all_onesp (size))
12651 if (! len || ! tree_int_cst_lt (len, size))
12652 return NULL_TREE;
12655 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12656 or if format doesn't contain % chars or is "%s". */
12657 if (! integer_zerop (flag))
12659 if (fmt_str == NULL)
12660 return NULL_TREE;
12661 if (strchr (fmt_str, target_percent) != NULL
12662 && strcmp (fmt_str, target_percent_s))
12663 return NULL_TREE;
12666 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12667 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12668 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12669 if (!fn)
12670 return NULL_TREE;
12672 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12675 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12676 a normal call should be emitted rather than expanding the function
12677 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12679 static tree
12680 fold_builtin_sprintf_chk (location_t loc, tree exp,
12681 enum built_in_function fcode)
12683 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12684 CALL_EXPR_ARGP (exp), fcode);
12687 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
12688 NULL_TREE if a normal call should be emitted rather than expanding
12689 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12690 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12691 passed as second argument. */
12693 static tree
12694 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12695 tree maxlen, enum built_in_function fcode)
12697 tree dest, size, len, fn, fmt, flag;
12698 const char *fmt_str;
12700 /* Verify the required arguments in the original call. */
12701 if (nargs < 5)
12702 return NULL_TREE;
12703 dest = args[0];
12704 if (!validate_arg (dest, POINTER_TYPE))
12705 return NULL_TREE;
12706 len = args[1];
12707 if (!validate_arg (len, INTEGER_TYPE))
12708 return NULL_TREE;
12709 flag = args[2];
12710 if (!validate_arg (flag, INTEGER_TYPE))
12711 return NULL_TREE;
12712 size = args[3];
12713 if (!validate_arg (size, INTEGER_TYPE))
12714 return NULL_TREE;
12715 fmt = args[4];
12716 if (!validate_arg (fmt, POINTER_TYPE))
12717 return NULL_TREE;
12719 if (! host_integerp (size, 1))
12720 return NULL_TREE;
12722 if (! integer_all_onesp (size))
12724 if (! host_integerp (len, 1))
12726 /* If LEN is not constant, try MAXLEN too.
12727 For MAXLEN only allow optimizing into non-_ocs function
12728 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12729 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12730 return NULL_TREE;
12732 else
12733 maxlen = len;
12735 if (tree_int_cst_lt (size, maxlen))
12736 return NULL_TREE;
12739 if (!init_target_chars ())
12740 return NULL_TREE;
12742 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12743 or if format doesn't contain % chars or is "%s". */
12744 if (! integer_zerop (flag))
12746 fmt_str = c_getstr (fmt);
12747 if (fmt_str == NULL)
12748 return NULL_TREE;
12749 if (strchr (fmt_str, target_percent) != NULL
12750 && strcmp (fmt_str, target_percent_s))
12751 return NULL_TREE;
12754 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12755 available. */
12756 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12757 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12758 if (!fn)
12759 return NULL_TREE;
12761 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12764 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12765 a normal call should be emitted rather than expanding the function
12766 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12767 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12768 passed as second argument. */
12770 tree
12771 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12772 enum built_in_function fcode)
12774 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12775 CALL_EXPR_ARGP (exp), maxlen, fcode);
12778 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12779 FMT and ARG are the arguments to the call; we don't fold cases with
12780 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12782 Return NULL_TREE if no simplification was possible, otherwise return the
12783 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12784 code of the function to be simplified. */
12786 static tree
12787 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12788 tree arg, bool ignore,
12789 enum built_in_function fcode)
12791 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12792 const char *fmt_str = NULL;
12794 /* If the return value is used, don't do the transformation. */
12795 if (! ignore)
12796 return NULL_TREE;
12798 /* Verify the required arguments in the original call. */
12799 if (!validate_arg (fmt, POINTER_TYPE))
12800 return NULL_TREE;
12802 /* Check whether the format is a literal string constant. */
12803 fmt_str = c_getstr (fmt);
12804 if (fmt_str == NULL)
12805 return NULL_TREE;
12807 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12809 /* If we're using an unlocked function, assume the other
12810 unlocked functions exist explicitly. */
12811 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12812 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12814 else
12816 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12817 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12820 if (!init_target_chars ())
12821 return NULL_TREE;
12823 if (strcmp (fmt_str, target_percent_s) == 0
12824 || strchr (fmt_str, target_percent) == NULL)
12826 const char *str;
12828 if (strcmp (fmt_str, target_percent_s) == 0)
12830 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12831 return NULL_TREE;
12833 if (!arg || !validate_arg (arg, POINTER_TYPE))
12834 return NULL_TREE;
12836 str = c_getstr (arg);
12837 if (str == NULL)
12838 return NULL_TREE;
12840 else
12842 /* The format specifier doesn't contain any '%' characters. */
12843 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12844 && arg)
12845 return NULL_TREE;
12846 str = fmt_str;
12849 /* If the string was "", printf does nothing. */
12850 if (str[0] == '\0')
12851 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12853 /* If the string has length of 1, call putchar. */
12854 if (str[1] == '\0')
12856 /* Given printf("c"), (where c is any one character,)
12857 convert "c"[0] to an int and pass that to the replacement
12858 function. */
12859 newarg = build_int_cst (NULL_TREE, str[0]);
12860 if (fn_putchar)
12861 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12863 else
12865 /* If the string was "string\n", call puts("string"). */
12866 size_t len = strlen (str);
12867 if ((unsigned char)str[len - 1] == target_newline
12868 && (size_t) (int) len == len
12869 && (int) len > 0)
12871 char *newstr;
12872 tree offset_node, string_cst;
12874 /* Create a NUL-terminated string that's one char shorter
12875 than the original, stripping off the trailing '\n'. */
12876 newarg = build_string_literal (len, str);
12877 string_cst = string_constant (newarg, &offset_node);
12878 gcc_checking_assert (string_cst
12879 && (TREE_STRING_LENGTH (string_cst)
12880 == (int) len)
12881 && integer_zerop (offset_node)
12882 && (unsigned char)
12883 TREE_STRING_POINTER (string_cst)[len - 1]
12884 == target_newline);
12885 /* build_string_literal creates a new STRING_CST,
12886 modify it in place to avoid double copying. */
12887 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12888 newstr[len - 1] = '\0';
12889 if (fn_puts)
12890 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12892 else
12893 /* We'd like to arrange to call fputs(string,stdout) here,
12894 but we need stdout and don't have a way to get it yet. */
12895 return NULL_TREE;
12899 /* The other optimizations can be done only on the non-va_list variants. */
12900 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12901 return NULL_TREE;
12903 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12904 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12906 if (!arg || !validate_arg (arg, POINTER_TYPE))
12907 return NULL_TREE;
12908 if (fn_puts)
12909 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12912 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12913 else if (strcmp (fmt_str, target_percent_c) == 0)
12915 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12916 return NULL_TREE;
12917 if (fn_putchar)
12918 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12921 if (!call)
12922 return NULL_TREE;
12924 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12927 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12928 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12929 more than 3 arguments, and ARG may be null in the 2-argument case.
12931 Return NULL_TREE if no simplification was possible, otherwise return the
12932 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12933 code of the function to be simplified. */
12935 static tree
12936 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12937 tree fmt, tree arg, bool ignore,
12938 enum built_in_function fcode)
12940 tree fn_fputc, fn_fputs, call = NULL_TREE;
12941 const char *fmt_str = NULL;
12943 /* If the return value is used, don't do the transformation. */
12944 if (! ignore)
12945 return NULL_TREE;
12947 /* Verify the required arguments in the original call. */
12948 if (!validate_arg (fp, POINTER_TYPE))
12949 return NULL_TREE;
12950 if (!validate_arg (fmt, POINTER_TYPE))
12951 return NULL_TREE;
12953 /* Check whether the format is a literal string constant. */
12954 fmt_str = c_getstr (fmt);
12955 if (fmt_str == NULL)
12956 return NULL_TREE;
12958 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12960 /* If we're using an unlocked function, assume the other
12961 unlocked functions exist explicitly. */
12962 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12963 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12965 else
12967 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12968 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12971 if (!init_target_chars ())
12972 return NULL_TREE;
12974 /* If the format doesn't contain % args or %%, use strcpy. */
12975 if (strchr (fmt_str, target_percent) == NULL)
12977 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12978 && arg)
12979 return NULL_TREE;
12981 /* If the format specifier was "", fprintf does nothing. */
12982 if (fmt_str[0] == '\0')
12984 /* If FP has side-effects, just wait until gimplification is
12985 done. */
12986 if (TREE_SIDE_EFFECTS (fp))
12987 return NULL_TREE;
12989 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12992 /* When "string" doesn't contain %, replace all cases of
12993 fprintf (fp, string) with fputs (string, fp). The fputs
12994 builtin will take care of special cases like length == 1. */
12995 if (fn_fputs)
12996 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12999 /* The other optimizations can be done only on the non-va_list variants. */
13000 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13001 return NULL_TREE;
13003 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13004 else if (strcmp (fmt_str, target_percent_s) == 0)
13006 if (!arg || !validate_arg (arg, POINTER_TYPE))
13007 return NULL_TREE;
13008 if (fn_fputs)
13009 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13012 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13013 else if (strcmp (fmt_str, target_percent_c) == 0)
13015 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13016 return NULL_TREE;
13017 if (fn_fputc)
13018 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13021 if (!call)
13022 return NULL_TREE;
13023 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13026 /* Initialize format string characters in the target charset. */
13028 static bool
13029 init_target_chars (void)
13031 static bool init;
13032 if (!init)
13034 target_newline = lang_hooks.to_target_charset ('\n');
13035 target_percent = lang_hooks.to_target_charset ('%');
13036 target_c = lang_hooks.to_target_charset ('c');
13037 target_s = lang_hooks.to_target_charset ('s');
13038 if (target_newline == 0 || target_percent == 0 || target_c == 0
13039 || target_s == 0)
13040 return false;
13042 target_percent_c[0] = target_percent;
13043 target_percent_c[1] = target_c;
13044 target_percent_c[2] = '\0';
13046 target_percent_s[0] = target_percent;
13047 target_percent_s[1] = target_s;
13048 target_percent_s[2] = '\0';
13050 target_percent_s_newline[0] = target_percent;
13051 target_percent_s_newline[1] = target_s;
13052 target_percent_s_newline[2] = target_newline;
13053 target_percent_s_newline[3] = '\0';
13055 init = true;
13057 return true;
13060 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13061 and no overflow/underflow occurred. INEXACT is true if M was not
13062 exactly calculated. TYPE is the tree type for the result. This
13063 function assumes that you cleared the MPFR flags and then
13064 calculated M to see if anything subsequently set a flag prior to
13065 entering this function. Return NULL_TREE if any checks fail. */
13067 static tree
13068 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13070 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13071 overflow/underflow occurred. If -frounding-math, proceed iff the
13072 result of calling FUNC was exact. */
13073 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13074 && (!flag_rounding_math || !inexact))
13076 REAL_VALUE_TYPE rr;
13078 real_from_mpfr (&rr, m, type, GMP_RNDN);
13079 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
13080 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13081 but the mpft_t is not, then we underflowed in the
13082 conversion. */
13083 if (real_isfinite (&rr)
13084 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13086 REAL_VALUE_TYPE rmode;
13088 real_convert (&rmode, TYPE_MODE (type), &rr);
13089 /* Proceed iff the specified mode can hold the value. */
13090 if (real_identical (&rmode, &rr))
13091 return build_real (type, rmode);
13094 return NULL_TREE;
13097 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
13098 number and no overflow/underflow occurred. INEXACT is true if M
13099 was not exactly calculated. TYPE is the tree type for the result.
13100 This function assumes that you cleared the MPFR flags and then
13101 calculated M to see if anything subsequently set a flag prior to
13102 entering this function. Return NULL_TREE if any checks fail, if
13103 FORCE_CONVERT is true, then bypass the checks. */
13105 static tree
13106 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13108 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13109 overflow/underflow occurred. If -frounding-math, proceed iff the
13110 result of calling FUNC was exact. */
13111 if (force_convert
13112 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13113 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13114 && (!flag_rounding_math || !inexact)))
13116 REAL_VALUE_TYPE re, im;
13118 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13119 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13120 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
13121 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
13122 but the mpft_t is not, then we underflowed in the
13123 conversion. */
13124 if (force_convert
13125 || (real_isfinite (&re) && real_isfinite (&im)
13126 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13127 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13129 REAL_VALUE_TYPE re_mode, im_mode;
13131 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13132 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13133 /* Proceed iff the specified mode can hold the value. */
13134 if (force_convert
13135 || (real_identical (&re_mode, &re)
13136 && real_identical (&im_mode, &im)))
13137 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13138 build_real (TREE_TYPE (type), im_mode));
13141 return NULL_TREE;
13144 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13145 FUNC on it and return the resulting value as a tree with type TYPE.
13146 If MIN and/or MAX are not NULL, then the supplied ARG must be
13147 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13148 acceptable values, otherwise they are not. The mpfr precision is
13149 set to the precision of TYPE. We assume that function FUNC returns
13150 zero if the result could be calculated exactly within the requested
13151 precision. */
13153 static tree
13154 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13155 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13156 bool inclusive)
13158 tree result = NULL_TREE;
13160 STRIP_NOPS (arg);
13162 /* To proceed, MPFR must exactly represent the target floating point
13163 format, which only happens when the target base equals two. */
13164 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13165 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13167 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13169 if (real_isfinite (ra)
13170 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13171 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13173 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13174 const int prec = fmt->p;
13175 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13176 int inexact;
13177 mpfr_t m;
13179 mpfr_init2 (m, prec);
13180 mpfr_from_real (m, ra, GMP_RNDN);
13181 mpfr_clear_flags ();
13182 inexact = func (m, m, rnd);
13183 result = do_mpfr_ckconv (m, type, inexact);
13184 mpfr_clear (m);
13188 return result;
13191 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13192 FUNC on it and return the resulting value as a tree with type TYPE.
13193 The mpfr precision is set to the precision of TYPE. We assume that
13194 function FUNC returns zero if the result could be calculated
13195 exactly within the requested precision. */
13197 static tree
13198 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13199 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13201 tree result = NULL_TREE;
13203 STRIP_NOPS (arg1);
13204 STRIP_NOPS (arg2);
13206 /* To proceed, MPFR must exactly represent the target floating point
13207 format, which only happens when the target base equals two. */
13208 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13209 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13210 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13212 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13213 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13215 if (real_isfinite (ra1) && real_isfinite (ra2))
13217 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13218 const int prec = fmt->p;
13219 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13220 int inexact;
13221 mpfr_t m1, m2;
13223 mpfr_inits2 (prec, m1, m2, NULL);
13224 mpfr_from_real (m1, ra1, GMP_RNDN);
13225 mpfr_from_real (m2, ra2, GMP_RNDN);
13226 mpfr_clear_flags ();
13227 inexact = func (m1, m1, m2, rnd);
13228 result = do_mpfr_ckconv (m1, type, inexact);
13229 mpfr_clears (m1, m2, NULL);
13233 return result;
13236 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13237 FUNC on it and return the resulting value as a tree with type TYPE.
13238 The mpfr precision is set to the precision of TYPE. We assume that
13239 function FUNC returns zero if the result could be calculated
13240 exactly within the requested precision. */
13242 static tree
13243 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13244 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13246 tree result = NULL_TREE;
13248 STRIP_NOPS (arg1);
13249 STRIP_NOPS (arg2);
13250 STRIP_NOPS (arg3);
13252 /* To proceed, MPFR must exactly represent the target floating point
13253 format, which only happens when the target base equals two. */
13254 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13255 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13256 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13257 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13259 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13260 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13261 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13263 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13265 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13266 const int prec = fmt->p;
13267 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13268 int inexact;
13269 mpfr_t m1, m2, m3;
13271 mpfr_inits2 (prec, m1, m2, m3, NULL);
13272 mpfr_from_real (m1, ra1, GMP_RNDN);
13273 mpfr_from_real (m2, ra2, GMP_RNDN);
13274 mpfr_from_real (m3, ra3, GMP_RNDN);
13275 mpfr_clear_flags ();
13276 inexact = func (m1, m1, m2, m3, rnd);
13277 result = do_mpfr_ckconv (m1, type, inexact);
13278 mpfr_clears (m1, m2, m3, NULL);
13282 return result;
13285 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13286 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13287 If ARG_SINP and ARG_COSP are NULL then the result is returned
13288 as a complex value.
13289 The type is taken from the type of ARG and is used for setting the
13290 precision of the calculation and results. */
13292 static tree
13293 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13295 tree const type = TREE_TYPE (arg);
13296 tree result = NULL_TREE;
13298 STRIP_NOPS (arg);
13300 /* To proceed, MPFR must exactly represent the target floating point
13301 format, which only happens when the target base equals two. */
13302 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13303 && TREE_CODE (arg) == REAL_CST
13304 && !TREE_OVERFLOW (arg))
13306 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13308 if (real_isfinite (ra))
13310 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13311 const int prec = fmt->p;
13312 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13313 tree result_s, result_c;
13314 int inexact;
13315 mpfr_t m, ms, mc;
13317 mpfr_inits2 (prec, m, ms, mc, NULL);
13318 mpfr_from_real (m, ra, GMP_RNDN);
13319 mpfr_clear_flags ();
13320 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13321 result_s = do_mpfr_ckconv (ms, type, inexact);
13322 result_c = do_mpfr_ckconv (mc, type, inexact);
13323 mpfr_clears (m, ms, mc, NULL);
13324 if (result_s && result_c)
13326 /* If we are to return in a complex value do so. */
13327 if (!arg_sinp && !arg_cosp)
13328 return build_complex (build_complex_type (type),
13329 result_c, result_s);
13331 /* Dereference the sin/cos pointer arguments. */
13332 arg_sinp = build_fold_indirect_ref (arg_sinp);
13333 arg_cosp = build_fold_indirect_ref (arg_cosp);
13334 /* Proceed if valid pointer type were passed in. */
13335 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13336 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13338 /* Set the values. */
13339 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13340 result_s);
13341 TREE_SIDE_EFFECTS (result_s) = 1;
13342 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13343 result_c);
13344 TREE_SIDE_EFFECTS (result_c) = 1;
13345 /* Combine the assignments into a compound expr. */
13346 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13347 result_s, result_c));
13352 return result;
13355 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13356 two-argument mpfr order N Bessel function FUNC on them and return
13357 the resulting value as a tree with type TYPE. The mpfr precision
13358 is set to the precision of TYPE. We assume that function FUNC
13359 returns zero if the result could be calculated exactly within the
13360 requested precision. */
13361 static tree
13362 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13363 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13364 const REAL_VALUE_TYPE *min, bool inclusive)
13366 tree result = NULL_TREE;
13368 STRIP_NOPS (arg1);
13369 STRIP_NOPS (arg2);
13371 /* To proceed, MPFR must exactly represent the target floating point
13372 format, which only happens when the target base equals two. */
13373 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13374 && host_integerp (arg1, 0)
13375 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13377 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13378 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13380 if (n == (long)n
13381 && real_isfinite (ra)
13382 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13384 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13385 const int prec = fmt->p;
13386 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13387 int inexact;
13388 mpfr_t m;
13390 mpfr_init2 (m, prec);
13391 mpfr_from_real (m, ra, GMP_RNDN);
13392 mpfr_clear_flags ();
13393 inexact = func (m, n, m, rnd);
13394 result = do_mpfr_ckconv (m, type, inexact);
13395 mpfr_clear (m);
13399 return result;
13402 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13403 the pointer *(ARG_QUO) and return the result. The type is taken
13404 from the type of ARG0 and is used for setting the precision of the
13405 calculation and results. */
13407 static tree
13408 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13410 tree const type = TREE_TYPE (arg0);
13411 tree result = NULL_TREE;
13413 STRIP_NOPS (arg0);
13414 STRIP_NOPS (arg1);
13416 /* To proceed, MPFR must exactly represent the target floating point
13417 format, which only happens when the target base equals two. */
13418 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13419 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13420 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13422 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13423 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13425 if (real_isfinite (ra0) && real_isfinite (ra1))
13427 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13428 const int prec = fmt->p;
13429 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13430 tree result_rem;
13431 long integer_quo;
13432 mpfr_t m0, m1;
13434 mpfr_inits2 (prec, m0, m1, NULL);
13435 mpfr_from_real (m0, ra0, GMP_RNDN);
13436 mpfr_from_real (m1, ra1, GMP_RNDN);
13437 mpfr_clear_flags ();
13438 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13439 /* Remquo is independent of the rounding mode, so pass
13440 inexact=0 to do_mpfr_ckconv(). */
13441 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13442 mpfr_clears (m0, m1, NULL);
13443 if (result_rem)
13445 /* MPFR calculates quo in the host's long so it may
13446 return more bits in quo than the target int can hold
13447 if sizeof(host long) > sizeof(target int). This can
13448 happen even for native compilers in LP64 mode. In
13449 these cases, modulo the quo value with the largest
13450 number that the target int can hold while leaving one
13451 bit for the sign. */
13452 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13453 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13455 /* Dereference the quo pointer argument. */
13456 arg_quo = build_fold_indirect_ref (arg_quo);
13457 /* Proceed iff a valid pointer type was passed in. */
13458 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13460 /* Set the value. */
13461 tree result_quo = fold_build2 (MODIFY_EXPR,
13462 TREE_TYPE (arg_quo), arg_quo,
13463 build_int_cst (NULL, integer_quo));
13464 TREE_SIDE_EFFECTS (result_quo) = 1;
13465 /* Combine the quo assignment with the rem. */
13466 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13467 result_quo, result_rem));
13472 return result;
13475 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13476 resulting value as a tree with type TYPE. The mpfr precision is
13477 set to the precision of TYPE. We assume that this mpfr function
13478 returns zero if the result could be calculated exactly within the
13479 requested precision. In addition, the integer pointer represented
13480 by ARG_SG will be dereferenced and set to the appropriate signgam
13481 (-1,1) value. */
13483 static tree
13484 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13486 tree result = NULL_TREE;
13488 STRIP_NOPS (arg);
13490 /* To proceed, MPFR must exactly represent the target floating point
13491 format, which only happens when the target base equals two. Also
13492 verify ARG is a constant and that ARG_SG is an int pointer. */
13493 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13494 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13495 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13496 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13498 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13500 /* In addition to NaN and Inf, the argument cannot be zero or a
13501 negative integer. */
13502 if (real_isfinite (ra)
13503 && ra->cl != rvc_zero
13504 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13506 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13507 const int prec = fmt->p;
13508 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13509 int inexact, sg;
13510 mpfr_t m;
13511 tree result_lg;
13513 mpfr_init2 (m, prec);
13514 mpfr_from_real (m, ra, GMP_RNDN);
13515 mpfr_clear_flags ();
13516 inexact = mpfr_lgamma (m, &sg, m, rnd);
13517 result_lg = do_mpfr_ckconv (m, type, inexact);
13518 mpfr_clear (m);
13519 if (result_lg)
13521 tree result_sg;
13523 /* Dereference the arg_sg pointer argument. */
13524 arg_sg = build_fold_indirect_ref (arg_sg);
13525 /* Assign the signgam value into *arg_sg. */
13526 result_sg = fold_build2 (MODIFY_EXPR,
13527 TREE_TYPE (arg_sg), arg_sg,
13528 build_int_cst (NULL, sg));
13529 TREE_SIDE_EFFECTS (result_sg) = 1;
13530 /* Combine the signgam assignment with the lgamma result. */
13531 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13532 result_sg, result_lg));
13537 return result;
13540 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13541 function FUNC on it and return the resulting value as a tree with
13542 type TYPE. The mpfr precision is set to the precision of TYPE. We
13543 assume that function FUNC returns zero if the result could be
13544 calculated exactly within the requested precision. */
13546 static tree
13547 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13549 tree result = NULL_TREE;
13551 STRIP_NOPS (arg);
13553 /* To proceed, MPFR must exactly represent the target floating point
13554 format, which only happens when the target base equals two. */
13555 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13556 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13557 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13559 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13560 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13562 if (real_isfinite (re) && real_isfinite (im))
13564 const struct real_format *const fmt =
13565 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13566 const int prec = fmt->p;
13567 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13568 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13569 int inexact;
13570 mpc_t m;
13572 mpc_init2 (m, prec);
13573 mpfr_from_real (mpc_realref(m), re, rnd);
13574 mpfr_from_real (mpc_imagref(m), im, rnd);
13575 mpfr_clear_flags ();
13576 inexact = func (m, m, crnd);
13577 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13578 mpc_clear (m);
13582 return result;
13585 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13586 mpc function FUNC on it and return the resulting value as a tree
13587 with type TYPE. The mpfr precision is set to the precision of
13588 TYPE. We assume that function FUNC returns zero if the result
13589 could be calculated exactly within the requested precision. If
13590 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13591 in the arguments and/or results. */
13593 tree
13594 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13595 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13597 tree result = NULL_TREE;
13599 STRIP_NOPS (arg0);
13600 STRIP_NOPS (arg1);
13602 /* To proceed, MPFR must exactly represent the target floating point
13603 format, which only happens when the target base equals two. */
13604 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13605 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13606 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13607 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13608 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13610 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13611 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13612 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13613 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13615 if (do_nonfinite
13616 || (real_isfinite (re0) && real_isfinite (im0)
13617 && real_isfinite (re1) && real_isfinite (im1)))
13619 const struct real_format *const fmt =
13620 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13621 const int prec = fmt->p;
13622 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13623 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13624 int inexact;
13625 mpc_t m0, m1;
13627 mpc_init2 (m0, prec);
13628 mpc_init2 (m1, prec);
13629 mpfr_from_real (mpc_realref(m0), re0, rnd);
13630 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13631 mpfr_from_real (mpc_realref(m1), re1, rnd);
13632 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13633 mpfr_clear_flags ();
13634 inexact = func (m0, m0, m1, crnd);
13635 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13636 mpc_clear (m0);
13637 mpc_clear (m1);
13641 return result;
13644 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13645 a normal call should be emitted rather than expanding the function
13646 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13648 static tree
13649 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13651 int nargs = gimple_call_num_args (stmt);
13653 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13654 (nargs > 0
13655 ? gimple_call_arg_ptr (stmt, 0)
13656 : &error_mark_node), fcode);
13659 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13660 a normal call should be emitted rather than expanding the function
13661 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13662 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13663 passed as second argument. */
13665 tree
13666 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13667 enum built_in_function fcode)
13669 int nargs = gimple_call_num_args (stmt);
13671 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13672 (nargs > 0
13673 ? gimple_call_arg_ptr (stmt, 0)
13674 : &error_mark_node), maxlen, fcode);
13677 /* Builtins with folding operations that operate on "..." arguments
13678 need special handling; we need to store the arguments in a convenient
13679 data structure before attempting any folding. Fortunately there are
13680 only a few builtins that fall into this category. FNDECL is the
13681 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13682 result of the function call is ignored. */
13684 static tree
13685 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13686 bool ignore ATTRIBUTE_UNUSED)
13688 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13689 tree ret = NULL_TREE;
13691 switch (fcode)
13693 case BUILT_IN_SPRINTF_CHK:
13694 case BUILT_IN_VSPRINTF_CHK:
13695 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13696 break;
13698 case BUILT_IN_SNPRINTF_CHK:
13699 case BUILT_IN_VSNPRINTF_CHK:
13700 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13702 default:
13703 break;
13705 if (ret)
13707 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13708 TREE_NO_WARNING (ret) = 1;
13709 return ret;
13711 return NULL_TREE;
13714 /* A wrapper function for builtin folding that prevents warnings for
13715 "statement without effect" and the like, caused by removing the
13716 call node earlier than the warning is generated. */
13718 tree
13719 fold_call_stmt (gimple stmt, bool ignore)
13721 tree ret = NULL_TREE;
13722 tree fndecl = gimple_call_fndecl (stmt);
13723 location_t loc = gimple_location (stmt);
13724 if (fndecl
13725 && TREE_CODE (fndecl) == FUNCTION_DECL
13726 && DECL_BUILT_IN (fndecl)
13727 && !gimple_call_va_arg_pack_p (stmt))
13729 int nargs = gimple_call_num_args (stmt);
13730 tree *args = (nargs > 0
13731 ? gimple_call_arg_ptr (stmt, 0)
13732 : &error_mark_node);
13734 if (avoid_folding_inline_builtin (fndecl))
13735 return NULL_TREE;
13736 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13738 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13740 else
13742 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13743 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13744 if (!ret)
13745 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13746 if (ret)
13748 /* Propagate location information from original call to
13749 expansion of builtin. Otherwise things like
13750 maybe_emit_chk_warning, that operate on the expansion
13751 of a builtin, will use the wrong location information. */
13752 if (gimple_has_location (stmt))
13754 tree realret = ret;
13755 if (TREE_CODE (ret) == NOP_EXPR)
13756 realret = TREE_OPERAND (ret, 0);
13757 if (CAN_HAVE_LOCATION_P (realret)
13758 && !EXPR_HAS_LOCATION (realret))
13759 SET_EXPR_LOCATION (realret, loc);
13760 return realret;
13762 return ret;
13766 return NULL_TREE;
13769 /* Look up the function in built_in_decls that corresponds to DECL
13770 and set ASMSPEC as its user assembler name. DECL must be a
13771 function decl that declares a builtin. */
13773 void
13774 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13776 tree builtin;
13777 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13778 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13779 && asmspec != 0);
13781 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13782 set_user_assembler_name (builtin, asmspec);
13783 switch (DECL_FUNCTION_CODE (decl))
13785 case BUILT_IN_MEMCPY:
13786 init_block_move_fn (asmspec);
13787 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13788 break;
13789 case BUILT_IN_MEMSET:
13790 init_block_clear_fn (asmspec);
13791 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13792 break;
13793 case BUILT_IN_MEMMOVE:
13794 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13795 break;
13796 case BUILT_IN_MEMCMP:
13797 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13798 break;
13799 case BUILT_IN_ABORT:
13800 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13801 break;
13802 case BUILT_IN_FFS:
13803 if (INT_TYPE_SIZE < BITS_PER_WORD)
13805 set_user_assembler_libfunc ("ffs", asmspec);
13806 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13807 MODE_INT, 0), "ffs");
13809 break;
13810 default:
13811 break;
13815 /* Return true if DECL is a builtin that expands to a constant or similarly
13816 simple code. */
13817 bool
13818 is_simple_builtin (tree decl)
13820 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13821 switch (DECL_FUNCTION_CODE (decl))
13823 /* Builtins that expand to constants. */
13824 case BUILT_IN_CONSTANT_P:
13825 case BUILT_IN_EXPECT:
13826 case BUILT_IN_OBJECT_SIZE:
13827 case BUILT_IN_UNREACHABLE:
13828 /* Simple register moves or loads from stack. */
13829 case BUILT_IN_RETURN_ADDRESS:
13830 case BUILT_IN_EXTRACT_RETURN_ADDR:
13831 case BUILT_IN_FROB_RETURN_ADDR:
13832 case BUILT_IN_RETURN:
13833 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13834 case BUILT_IN_FRAME_ADDRESS:
13835 case BUILT_IN_VA_END:
13836 case BUILT_IN_STACK_SAVE:
13837 case BUILT_IN_STACK_RESTORE:
13838 /* Exception state returns or moves registers around. */
13839 case BUILT_IN_EH_FILTER:
13840 case BUILT_IN_EH_POINTER:
13841 case BUILT_IN_EH_COPY_VALUES:
13842 return true;
13844 default:
13845 return false;
13848 return false;
13851 /* Return true if DECL is a builtin that is not expensive, i.e., they are
13852 most probably expanded inline into reasonably simple code. This is a
13853 superset of is_simple_builtin. */
13854 bool
13855 is_inexpensive_builtin (tree decl)
13857 if (!decl)
13858 return false;
13859 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13860 return true;
13861 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13862 switch (DECL_FUNCTION_CODE (decl))
13864 case BUILT_IN_ABS:
13865 case BUILT_IN_ALLOCA:
13866 case BUILT_IN_BSWAP32:
13867 case BUILT_IN_BSWAP64:
13868 case BUILT_IN_CLZ:
13869 case BUILT_IN_CLZIMAX:
13870 case BUILT_IN_CLZL:
13871 case BUILT_IN_CLZLL:
13872 case BUILT_IN_CTZ:
13873 case BUILT_IN_CTZIMAX:
13874 case BUILT_IN_CTZL:
13875 case BUILT_IN_CTZLL:
13876 case BUILT_IN_FFS:
13877 case BUILT_IN_FFSIMAX:
13878 case BUILT_IN_FFSL:
13879 case BUILT_IN_FFSLL:
13880 case BUILT_IN_IMAXABS:
13881 case BUILT_IN_FINITE:
13882 case BUILT_IN_FINITEF:
13883 case BUILT_IN_FINITEL:
13884 case BUILT_IN_FINITED32:
13885 case BUILT_IN_FINITED64:
13886 case BUILT_IN_FINITED128:
13887 case BUILT_IN_FPCLASSIFY:
13888 case BUILT_IN_ISFINITE:
13889 case BUILT_IN_ISINF_SIGN:
13890 case BUILT_IN_ISINF:
13891 case BUILT_IN_ISINFF:
13892 case BUILT_IN_ISINFL:
13893 case BUILT_IN_ISINFD32:
13894 case BUILT_IN_ISINFD64:
13895 case BUILT_IN_ISINFD128:
13896 case BUILT_IN_ISNAN:
13897 case BUILT_IN_ISNANF:
13898 case BUILT_IN_ISNANL:
13899 case BUILT_IN_ISNAND32:
13900 case BUILT_IN_ISNAND64:
13901 case BUILT_IN_ISNAND128:
13902 case BUILT_IN_ISNORMAL:
13903 case BUILT_IN_ISGREATER:
13904 case BUILT_IN_ISGREATEREQUAL:
13905 case BUILT_IN_ISLESS:
13906 case BUILT_IN_ISLESSEQUAL:
13907 case BUILT_IN_ISLESSGREATER:
13908 case BUILT_IN_ISUNORDERED:
13909 case BUILT_IN_VA_ARG_PACK:
13910 case BUILT_IN_VA_ARG_PACK_LEN:
13911 case BUILT_IN_VA_COPY:
13912 case BUILT_IN_TRAP:
13913 case BUILT_IN_SAVEREGS:
13914 case BUILT_IN_POPCOUNTL:
13915 case BUILT_IN_POPCOUNTLL:
13916 case BUILT_IN_POPCOUNTIMAX:
13917 case BUILT_IN_POPCOUNT:
13918 case BUILT_IN_PARITYL:
13919 case BUILT_IN_PARITYLL:
13920 case BUILT_IN_PARITYIMAX:
13921 case BUILT_IN_PARITY:
13922 case BUILT_IN_LABS:
13923 case BUILT_IN_LLABS:
13924 case BUILT_IN_PREFETCH:
13925 return true;
13927 default:
13928 return is_simple_builtin (decl);
13931 return false;