[official-gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "realmpfr.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "predict.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "langhooks.h"
47 #include "basic-block.h"
48 #include "tree-mudflap.h"
49 #include "tree-flow.h"
50 #include "value-prof.h"
51 #include "diagnostic-core.h"
52 #include "builtins.h"
55 #ifndef PAD_VARARGS_DOWN
56 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
57 #endif
58 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
60 struct target_builtins default_target_builtins;
61 #if SWITCHABLE_TARGET
62 struct target_builtins *this_target_builtins = &default_target_builtins;
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
71 {
72 #include "builtins.def"
73 };
74 #undef DEF_BUILTIN
76 /* Set up an array of _DECL trees, making sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when the builtin is constructed implicitly by the compiler.
80 An entry may be NULL_TREE when this is invalid (for instance, the runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, bool);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
194 static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);
196 static rtx expand_builtin_object_size (tree);
197 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
198 enum built_in_function);
199 static void maybe_emit_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
201 static void maybe_emit_free_warning (tree);
202 static tree fold_builtin_object_size (tree, tree);
203 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
204 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
205 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
206 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
207 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
208 enum built_in_function);
209 static bool init_target_chars (void);
211 static unsigned HOST_WIDE_INT target_newline;
212 static unsigned HOST_WIDE_INT target_percent;
213 static unsigned HOST_WIDE_INT target_c;
214 static unsigned HOST_WIDE_INT target_s;
215 static char target_percent_c[3];
216 static char target_percent_s[3];
217 static char target_percent_s_newline[4];
218 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
219 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
220 static tree do_mpfr_arg2 (tree, tree, tree,
221 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
222 static tree do_mpfr_arg3 (tree, tree, tree, tree,
223 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
224 static tree do_mpfr_sincos (tree, tree, tree);
225 static tree do_mpfr_bessel_n (tree, tree, tree,
226 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_remquo (tree, tree, tree);
229 static tree do_mpfr_lgamma_r (tree, tree, tree);
231 /* Return true if NAME starts with __builtin_ or __sync_. */
233 bool
234 is_builtin_name (const char *name)
236 if (strncmp (name, "__builtin_", 10) == 0)
237 return true;
238 if (strncmp (name, "__sync_", 7) == 0)
239 return true;
240 return false;
244 /* Return true if DECL is a function symbol representing a built-in. */
246 bool
247 is_builtin_fn (tree decl)
249 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
253 /* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This is the case whenever a function is invoked
255 under its "internal" name, which normally contains the prefix "__builtin". */
257 static bool
258 called_as_built_in (tree node)
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
267 /* Compute values M and N such that M divides (address of EXP - N) and
268 such that N < M. Store N in *BITPOSP and return M.
270 Note that the address (and thus the alignment) computed here is based
271 on the address to which a symbol resolves, whereas DECL_ALIGN is based
272 on the address at which an object is actually located. These two
273 addresses are not always the same. For example, on ARM targets,
274 the address &foo of a Thumb function foo() has the lowest bit set,
275 whereas foo() itself starts on an even address. */
277 unsigned int
278 get_object_alignment_1 (tree exp, unsigned HOST_WIDE_INT *bitposp)
280 HOST_WIDE_INT bitsize, bitpos;
281 tree offset;
282 enum machine_mode mode;
283 int unsignedp, volatilep;
284 unsigned int align, inner;
286 /* Get the innermost object and the constant (bitpos) and possibly
287 variable (offset) offset of the access. */
288 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
289 &mode, &unsignedp, &volatilep, true);
291 /* Extract alignment information from the innermost object and
292 possibly adjust bitpos and offset. */
293 if (TREE_CODE (exp) == CONST_DECL)
294 exp = DECL_INITIAL (exp);
295 if (DECL_P (exp)
296 && TREE_CODE (exp) != LABEL_DECL)
298 if (TREE_CODE (exp) == FUNCTION_DECL)
300 /* Function addresses can encode extra information besides their
301 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
302 allows the low bit to be used as a virtual bit, we know
303 that the address itself must be 2-byte aligned. */
304 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
305 align = 2 * BITS_PER_UNIT;
306 else
307 align = BITS_PER_UNIT;
309 else
310 align = DECL_ALIGN (exp);
312 else if (CONSTANT_CLASS_P (exp))
314 align = TYPE_ALIGN (TREE_TYPE (exp));
315 #ifdef CONSTANT_ALIGNMENT
316 align = (unsigned)CONSTANT_ALIGNMENT (exp, align);
317 #endif
319 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
320 align = TYPE_ALIGN (TREE_TYPE (exp));
321 else if (TREE_CODE (exp) == INDIRECT_REF)
322 align = TYPE_ALIGN (TREE_TYPE (exp));
323 else if (TREE_CODE (exp) == MEM_REF)
325 tree addr = TREE_OPERAND (exp, 0);
326 struct ptr_info_def *pi;
327 if (TREE_CODE (addr) == BIT_AND_EXPR
328 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
330 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
331 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
332 align *= BITS_PER_UNIT;
333 addr = TREE_OPERAND (addr, 0);
335 else
336 align = BITS_PER_UNIT;
337 if (TREE_CODE (addr) == SSA_NAME
338 && (pi = SSA_NAME_PTR_INFO (addr)))
340 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
341 align = MAX (pi->align * BITS_PER_UNIT, align);
343 else if (TREE_CODE (addr) == ADDR_EXPR)
344 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
345 bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
347 else if (TREE_CODE (exp) == TARGET_MEM_REF)
349 struct ptr_info_def *pi;
350 tree addr = TMR_BASE (exp);
351 if (TREE_CODE (addr) == BIT_AND_EXPR
352 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
354 align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
355 & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
356 align *= BITS_PER_UNIT;
357 addr = TREE_OPERAND (addr, 0);
359 else
360 align = BITS_PER_UNIT;
361 if (TREE_CODE (addr) == SSA_NAME
362 && (pi = SSA_NAME_PTR_INFO (addr)))
364 bitpos += (pi->misalign * BITS_PER_UNIT) & ~(align - 1);
365 align = MAX (pi->align * BITS_PER_UNIT, align);
367 else if (TREE_CODE (addr) == ADDR_EXPR)
368 align = MAX (align, get_object_alignment (TREE_OPERAND (addr, 0), ~0U));
369 if (TMR_OFFSET (exp))
370 bitpos += TREE_INT_CST_LOW (TMR_OFFSET (exp)) * BITS_PER_UNIT;
371 if (TMR_INDEX (exp) && TMR_STEP (exp))
373 unsigned HOST_WIDE_INT step = TREE_INT_CST_LOW (TMR_STEP (exp));
374 align = MIN (align, (step & -step) * BITS_PER_UNIT);
376 else if (TMR_INDEX (exp))
377 align = BITS_PER_UNIT;
378 if (TMR_INDEX2 (exp))
379 align = BITS_PER_UNIT;
381 else
382 align = BITS_PER_UNIT;
384 /* If there is a non-constant offset part extract the maximum
385 alignment that can prevail. */
386 inner = ~0U;
387 while (offset)
389 tree next_offset;
391 if (TREE_CODE (offset) == PLUS_EXPR)
393 next_offset = TREE_OPERAND (offset, 0);
394 offset = TREE_OPERAND (offset, 1);
396 else
397 next_offset = NULL;
398 if (host_integerp (offset, 1))
400 /* Any overflow in calculating offset_bits won't change
401 the alignment. */
402 unsigned offset_bits
403 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
405 if (offset_bits)
406 inner = MIN (inner, (offset_bits & -offset_bits));
408 else if (TREE_CODE (offset) == MULT_EXPR
409 && host_integerp (TREE_OPERAND (offset, 1), 1))
411 /* Any overflow in calculating offset_factor won't change
412 the alignment. */
413 unsigned offset_factor
414 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
415 * BITS_PER_UNIT);
417 if (offset_factor)
418 inner = MIN (inner, (offset_factor & -offset_factor));
420 else
422 inner = MIN (inner, BITS_PER_UNIT);
423 break;
425 offset = next_offset;
428 /* Alignment is innermost object alignment adjusted by the constant
429 and non-constant offset parts. */
430 align = MIN (align, inner);
431 bitpos = bitpos & (align - 1);
433 *bitposp = bitpos;
434 return align;
437 /* Return the alignment in bits of EXP, an object.
438 Don't return more than MAX_ALIGN no matter what. */
440 unsigned int
441 get_object_alignment (tree exp, unsigned int max_align)
443 unsigned HOST_WIDE_INT bitpos = 0;
444 unsigned int align;
446 align = get_object_alignment_1 (exp, &bitpos);
448 /* align and bitpos now specify known low bits of the pointer.
449 ptr & (align - 1) == bitpos. */
451 if (bitpos != 0)
452 align = (bitpos & -bitpos);
454 return MIN (align, max_align);
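/* Worked example of the align/bitpos contract above (illustrative only,
   with made-up numbers): if get_object_alignment_1 reports align == 64
   and bitpos == 32 (both in bits), the pointer is known to satisfy
   ptr % 8 == 4 in bytes, so the strongest power-of-two alignment that
   holds for the pointer itself is bitpos & -bitpos == 32 bits, which is
   what the bitpos != 0 branch above returns (capped by MAX_ALIGN).  */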
457 /* Returns true iff we can trust that alignment information has been
458 calculated properly. */
460 bool
461 can_trust_pointer_alignment (void)
463 /* We rely on TER to compute accurate alignment information. */
464 return (optimize && flag_tree_ter);
467 /* Return the alignment in bits of EXP, a pointer valued expression.
468 But don't return more than MAX_ALIGN no matter what.
469 The alignment returned is, by default, the alignment of the thing that
470 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
472 Otherwise, look at the expression to see if we can do better, i.e., if the
473 expression is actually pointing at an object whose alignment is tighter. */
475 unsigned int
476 get_pointer_alignment (tree exp, unsigned int max_align)
478 STRIP_NOPS (exp);
480 if (TREE_CODE (exp) == ADDR_EXPR)
481 return get_object_alignment (TREE_OPERAND (exp, 0), max_align);
482 else if (TREE_CODE (exp) == SSA_NAME
483 && POINTER_TYPE_P (TREE_TYPE (exp)))
485 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
486 unsigned align;
487 if (!pi)
488 return BITS_PER_UNIT;
489 if (pi->misalign != 0)
490 align = (pi->misalign & -pi->misalign);
491 else
492 align = pi->align;
493 return MIN (max_align, align * BITS_PER_UNIT);
496 return POINTER_TYPE_P (TREE_TYPE (exp)) ? BITS_PER_UNIT : 0;
499 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
500 way, because it could contain a zero byte in the middle.
501 TREE_STRING_LENGTH is the size of the character array, not the string.
503 ONLY_VALUE should be nonzero if the result is not going to be emitted
504 into the instruction stream and zero if it is going to be expanded.
505 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
506 is returned, otherwise NULL, since
507 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
508 evaluate the side-effects.
510 The value returned is of type `ssizetype'.
512 Unfortunately, string_constant can't access the values of const char
513 arrays with initializers, so neither can we do so here. */
515 tree
516 c_strlen (tree src, int only_value)
518 tree offset_node;
519 HOST_WIDE_INT offset;
520 int max;
521 const char *ptr;
522 location_t loc;
524 STRIP_NOPS (src);
525 if (TREE_CODE (src) == COND_EXPR
526 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
528 tree len1, len2;
530 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
531 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
532 if (tree_int_cst_equal (len1, len2))
533 return len1;
536 if (TREE_CODE (src) == COMPOUND_EXPR
537 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
538 return c_strlen (TREE_OPERAND (src, 1), only_value);
540 loc = EXPR_LOC_OR_HERE (src);
542 src = string_constant (src, &offset_node);
543 if (src == 0)
544 return NULL_TREE;
546 max = TREE_STRING_LENGTH (src) - 1;
547 ptr = TREE_STRING_POINTER (src);
549 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
551 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
552 compute the offset to the following null if we don't know where to
553 start searching for it. */
554 int i;
556 for (i = 0; i < max; i++)
557 if (ptr[i] == 0)
558 return NULL_TREE;
560 /* We don't know the starting offset, but we do know that the string
561 has no internal zero bytes. We can assume that the offset falls
562 within the bounds of the string; otherwise, the programmer deserves
563 what he gets. Subtract the offset from the length of the string,
564 and return that. This would perhaps not be valid if we were dealing
565 with named arrays in addition to literal string constants. */
567 return size_diffop_loc (loc, size_int (max), offset_node);
570 /* We have a known offset into the string. Start searching there for
571 a null character if we can represent it as a single HOST_WIDE_INT. */
572 if (offset_node == 0)
573 offset = 0;
574 else if (! host_integerp (offset_node, 0))
575 offset = -1;
576 else
577 offset = tree_low_cst (offset_node, 0);
579 /* If the offset is known to be out of bounds, warn, and call strlen at
580 runtime. */
581 if (offset < 0 || offset > max)
583 /* Suppress multiple warnings for propagated constant strings. */
584 if (! TREE_NO_WARNING (src))
586 warning_at (loc, 0, "offset outside bounds of constant string");
587 TREE_NO_WARNING (src) = 1;
589 return NULL_TREE;
592 /* Use strlen to search for the first zero byte. Since any strings
593 constructed with build_string will have nulls appended, we win even
594 if we get handed something like (char[4])"abcd".
596 Since OFFSET is our starting index into the string, no further
597 calculation is needed. */
598 return ssize_int (strlen (ptr + offset));
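/* Concrete examples of the cases handled above (illustrative only):
   for the constant "hello", c_strlen returns ssize_int (5); for
   "foo\0bar" with a known offset of 4 it returns ssize_int (3); for
   "foo\0bar" with a non-constant offset it returns NULL_TREE, because
   the embedded zero byte makes the length depend on where the offset
   falls.  */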
601 /* Return a char pointer for a C string if it is a string constant
602 or the sum of a string constant and an integer constant. */
604 static const char *
605 c_getstr (tree src)
607 tree offset_node;
609 src = string_constant (src, &offset_node);
610 if (src == 0)
611 return 0;
613 if (offset_node == 0)
614 return TREE_STRING_POINTER (src);
615 else if (!host_integerp (offset_node, 1)
616 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
617 return 0;
619 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
622 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
623 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
625 static rtx
626 c_readstr (const char *str, enum machine_mode mode)
628 HOST_WIDE_INT c[2];
629 HOST_WIDE_INT ch;
630 unsigned int i, j;
632 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
634 c[0] = 0;
635 c[1] = 0;
636 ch = 1;
637 for (i = 0; i < GET_MODE_SIZE (mode); i++)
639 j = i;
640 if (WORDS_BIG_ENDIAN)
641 j = GET_MODE_SIZE (mode) - i - 1;
642 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
643 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
644 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
645 j *= BITS_PER_UNIT;
646 gcc_assert (j < 2 * HOST_BITS_PER_WIDE_INT);
648 if (ch)
649 ch = (unsigned char) str[i];
650 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
652 return immed_double_const (c[0], c[1], mode);
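/* Example of the value built above (illustrative only): for the string
   "abcd" read in a 32-bit integer mode, the constant is 0x61626364 on a
   big-endian target and 0x64636261 on a little-endian one, i.e. the same
   value a word load from the string's memory image would produce.  */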
655 /* Cast a target constant CST to target CHAR and if that value fits into
656 host char type, return zero and put that value into variable pointed to by
657 P. */
659 static int
660 target_char_cast (tree cst, char *p)
662 unsigned HOST_WIDE_INT val, hostval;
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
666 return 1;
668 val = TREE_INT_CST_LOW (cst);
669 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
670 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
672 hostval = val;
673 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
674 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
676 if (val != hostval)
677 return 1;
679 *p = hostval;
680 return 0;
683 /* Similar to save_expr, but assumes that arbitrary code is not executed
684 in between the multiple evaluations. In particular, we assume that a
685 non-addressable local variable will not be modified. */
687 static tree
688 builtin_save_expr (tree exp)
690 if (TREE_CODE (exp) == SSA_NAME
691 || (TREE_ADDRESSABLE (exp) == 0
692 && (TREE_CODE (exp) == PARM_DECL
693 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
694 return exp;
696 return save_expr (exp);
699 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
700 times to get the address of either a higher stack frame, or a return
701 address located within it (depending on FNDECL_CODE). */
703 static rtx
704 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
706 int i;
708 #ifdef INITIAL_FRAME_ADDRESS_RTX
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
710 #else
711 rtx tem;
713 /* For a zero count with __builtin_return_address, we don't care what
714 frame address we return, because target-specific definitions will
715 override us. Therefore frame pointer elimination is OK, and using
716 the soft frame pointer is OK.
718 For a nonzero count, or a zero count with __builtin_frame_address,
719 we require a stable offset from the current frame pointer to the
720 previous one, so we must use the hard frame pointer, and
721 we must disable frame pointer elimination. */
722 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
723 tem = frame_pointer_rtx;
724 else
726 tem = hard_frame_pointer_rtx;
728 /* Tell reload not to eliminate the frame pointer. */
729 crtl->accesses_prior_frames = 1;
731 #endif
733 /* Some machines need special handling before we can access
734 arbitrary frames. For example, on the SPARC, we must first flush
735 all register windows to the stack. */
736 #ifdef SETUP_FRAME_ADDRESSES
737 if (count > 0)
738 SETUP_FRAME_ADDRESSES ();
739 #endif
741 /* On the SPARC, the return address is not in the frame, it is in a
742 register. There is no way to access it off of the current frame
743 pointer, but it can be accessed off the previous frame pointer by
744 reading the value from the register window save area. */
745 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
746 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
747 count--;
748 #endif
750 /* Scan back COUNT frames to the specified frame. */
751 for (i = 0; i < count; i++)
753 /* Assume the dynamic chain pointer is in the word that the
754 frame address points to, unless otherwise specified. */
755 #ifdef DYNAMIC_CHAIN_ADDRESS
756 tem = DYNAMIC_CHAIN_ADDRESS (tem);
757 #endif
758 tem = memory_address (Pmode, tem);
759 tem = gen_frame_mem (Pmode, tem);
760 tem = copy_to_reg (tem);
763 /* For __builtin_frame_address, return what we've got. But, on
764 the SPARC for example, we may have to add a bias. */
765 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
766 #ifdef FRAME_ADDR_RTX
767 return FRAME_ADDR_RTX (tem);
768 #else
769 return tem;
770 #endif
772 /* For __builtin_return_address, get the return address from that frame. */
773 #ifdef RETURN_ADDR_RTX
774 tem = RETURN_ADDR_RTX (count, tem);
775 #else
776 tem = memory_address (Pmode,
777 plus_constant (tem, GET_MODE_SIZE (Pmode)));
778 tem = gen_frame_mem (Pmode, tem);
779 #endif
780 return tem;
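/* Illustrative user-level usage of the builtins expanded above (a sketch,
   not part of this file; record_caller and its parameter are made up).
   Both builtins require a constant argument; nonzero counts walk the
   dynamic chain as in the loop above.  */
#if 0
static void
record_caller (void **slot)
{
  void *fp;

  /* The address the current call will return to.  */
  *slot = __builtin_return_address (0);

  /* The frame address of the current function; a count of 1 would name
     the caller's frame, subject to the caveats documented above.  */
  fp = __builtin_frame_address (0);
  (void) fp;
}
#endif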
783 /* Alias set used for setjmp buffer. */
784 static alias_set_type setjmp_alias_set = -1;
786 /* Construct the leading half of a __builtin_setjmp call. Control will
787 return to RECEIVER_LABEL. This is also called directly by the SJLJ
788 exception handling code. */
790 void
791 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
793 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
794 rtx stack_save;
795 rtx mem;
797 if (setjmp_alias_set == -1)
798 setjmp_alias_set = new_alias_set ();
800 buf_addr = convert_memory_address (Pmode, buf_addr);
802 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
804 /* We store the frame pointer and the address of receiver_label in
805 the buffer and use the rest of it for the stack save area, which
806 is machine-dependent. */
808 mem = gen_rtx_MEM (Pmode, buf_addr);
809 set_mem_alias_set (mem, setjmp_alias_set);
810 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
812 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
813 set_mem_alias_set (mem, setjmp_alias_set);
815 emit_move_insn (validize_mem (mem),
816 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
818 stack_save = gen_rtx_MEM (sa_mode,
819 plus_constant (buf_addr,
820 2 * GET_MODE_SIZE (Pmode)));
821 set_mem_alias_set (stack_save, setjmp_alias_set);
822 emit_stack_save (SAVE_NONLOCAL, &stack_save);
824 /* If there is further processing to do, do it. */
825 #ifdef HAVE_builtin_setjmp_setup
826 if (HAVE_builtin_setjmp_setup)
827 emit_insn (gen_builtin_setjmp_setup (buf_addr));
828 #endif
830 /* We have a nonlocal label. */
831 cfun->has_nonlocal_label = 1;
834 /* Construct the trailing part of a __builtin_setjmp call. This is
835 also called directly by the SJLJ exception handling code. */
837 void
838 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
840 rtx chain;
842 /* Clobber the FP when we get here, so we have to make sure it's
843 marked as used by this function. */
844 emit_use (hard_frame_pointer_rtx);
846 /* Mark the static chain as clobbered here so life information
847 doesn't get messed up for it. */
848 chain = targetm.calls.static_chain (current_function_decl, true);
849 if (chain && REG_P (chain))
850 emit_clobber (chain);
852 /* Now put in the code to restore the frame pointer, and argument
853 pointer, if needed. */
854 #ifdef HAVE_nonlocal_goto
855 if (! HAVE_nonlocal_goto)
856 #endif
858 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
859 /* This might change the hard frame pointer in ways that aren't
860 apparent to early optimization passes, so force a clobber. */
861 emit_clobber (hard_frame_pointer_rtx);
864 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
865 if (fixed_regs[ARG_POINTER_REGNUM])
867 #ifdef ELIMINABLE_REGS
868 size_t i;
869 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
871 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
872 if (elim_regs[i].from == ARG_POINTER_REGNUM
873 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
874 break;
876 if (i == ARRAY_SIZE (elim_regs))
877 #endif
879 /* Now restore our arg pointer from the address at which it
880 was saved in our stack frame. */
881 emit_move_insn (crtl->args.internal_arg_pointer,
882 copy_to_reg (get_arg_pointer_save_area ()));
885 #endif
887 #ifdef HAVE_builtin_setjmp_receiver
888 if (HAVE_builtin_setjmp_receiver)
889 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
890 else
891 #endif
892 #ifdef HAVE_nonlocal_goto_receiver
893 if (HAVE_nonlocal_goto_receiver)
894 emit_insn (gen_nonlocal_goto_receiver ());
895 else
896 #endif
897 { /* Nothing */ }
899 /* We must not allow the code we just generated to be reordered by
900 scheduling. Specifically, the update of the frame pointer must
901 happen immediately, not later. */
902 emit_insn (gen_blockage ());
905 /* __builtin_longjmp is passed a pointer to an array of five words (not
906 all will be used on all machines). It operates similarly to the C
907 library function of the same name, but is more efficient. Much of
908 the code below is copied from the handling of non-local gotos. */
910 static void
911 expand_builtin_longjmp (rtx buf_addr, rtx value)
913 rtx fp, lab, stack, insn, last;
914 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
916 /* DRAP is needed for stack realignment if longjmp is expanded in the
917 current function. */
918 if (SUPPORTS_STACK_ALIGNMENT)
919 crtl->need_drap = true;
921 if (setjmp_alias_set == -1)
922 setjmp_alias_set = new_alias_set ();
924 buf_addr = convert_memory_address (Pmode, buf_addr);
926 buf_addr = force_reg (Pmode, buf_addr);
928 /* We require the user to pass a second argument of 1, because that is
929 what builtin_setjmp will return. */
930 gcc_assert (value == const1_rtx);
932 last = get_last_insn ();
933 #ifdef HAVE_builtin_longjmp
934 if (HAVE_builtin_longjmp)
935 emit_insn (gen_builtin_longjmp (buf_addr));
936 else
937 #endif
939 fp = gen_rtx_MEM (Pmode, buf_addr);
940 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
941 GET_MODE_SIZE (Pmode)));
943 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
944 2 * GET_MODE_SIZE (Pmode)));
945 set_mem_alias_set (fp, setjmp_alias_set);
946 set_mem_alias_set (lab, setjmp_alias_set);
947 set_mem_alias_set (stack, setjmp_alias_set);
949 /* Pick up FP, label, and SP from the block and jump. This code is
950 from expand_goto in stmt.c; see there for detailed comments. */
951 #ifdef HAVE_nonlocal_goto
952 if (HAVE_nonlocal_goto)
953 /* We have to pass a value to the nonlocal_goto pattern that will
954 get copied into the static_chain pointer, but it does not matter
955 what that value is, because builtin_setjmp does not use it. */
956 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
957 else
958 #endif
960 lab = copy_to_reg (lab);
962 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
963 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
965 emit_move_insn (hard_frame_pointer_rtx, fp);
966 emit_stack_restore (SAVE_NONLOCAL, stack);
968 emit_use (hard_frame_pointer_rtx);
969 emit_use (stack_pointer_rtx);
970 emit_indirect_jump (lab);
974 /* Search backwards and mark the jump insn as a non-local goto.
975 Note that this precludes the use of __builtin_longjmp to a
976 __builtin_setjmp target in the same function. However, we've
977 already cautioned the user that these functions are for
978 internal exception handling use only. */
979 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
981 gcc_assert (insn != last);
983 if (JUMP_P (insn))
985 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
986 break;
988 else if (CALL_P (insn))
989 break;
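/* Illustrative usage of the setjmp/longjmp builtins handled above (a
   sketch, not part of this file; the buffer and function names are made
   up).  The buffer is an array of five words, and __builtin_longjmp must
   be passed 1 as its second argument, matching the gcc_assert above.  */
#if 0
static void *jmp_buffer[5];

static void
bail_out (void)
{
  __builtin_longjmp (jmp_buffer, 1);	/* Does not return.  */
}

static int
protected_call (void (*fn) (void))
{
  if (__builtin_setjmp (jmp_buffer))
    return 1;			/* Reached via __builtin_longjmp.  */
  fn ();
  return 0;
}
#endif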
993 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
994 and the address of the save area. */
996 static rtx
997 expand_builtin_nonlocal_goto (tree exp)
999 tree t_label, t_save_area;
1000 rtx r_label, r_save_area, r_fp, r_sp, insn;
1002 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1003 return NULL_RTX;
1005 t_label = CALL_EXPR_ARG (exp, 0);
1006 t_save_area = CALL_EXPR_ARG (exp, 1);
1008 r_label = expand_normal (t_label);
1009 r_label = convert_memory_address (Pmode, r_label);
1010 r_save_area = expand_normal (t_save_area);
1011 r_save_area = convert_memory_address (Pmode, r_save_area);
1012 /* Copy the address of the save location to a register just in case it was
1013 based on the frame pointer. */
1014 r_save_area = copy_to_reg (r_save_area);
1015 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1016 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1017 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
1019 crtl->has_nonlocal_goto = 1;
1021 #ifdef HAVE_nonlocal_goto
1022 /* ??? We no longer need to pass the static chain value, afaik. */
1023 if (HAVE_nonlocal_goto)
1024 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1025 else
1026 #endif
1028 r_label = copy_to_reg (r_label);
1030 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1031 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1033 /* Restore frame pointer for containing function. */
1034 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1035 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1037 /* USE of hard_frame_pointer_rtx added for consistency;
1038 not clear if really needed. */
1039 emit_use (hard_frame_pointer_rtx);
1040 emit_use (stack_pointer_rtx);
1042 /* If the architecture is using a GP register, we must
1043 conservatively assume that the target function makes use of it.
1044 The prologue of functions with nonlocal gotos must therefore
1045 initialize the GP register to the appropriate value, and we
1046 must then make sure that this value is live at the point
1047 of the jump. (Note that this doesn't necessarily apply
1048 to targets with a nonlocal_goto pattern; they are free
1049 to implement it in their own way. Note also that this is
1050 a no-op if the GP register is a global invariant.) */
1051 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1052 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1053 emit_use (pic_offset_table_rtx);
1055 emit_indirect_jump (r_label);
1058 /* Search backwards to the jump insn and mark it as a
1059 non-local goto. */
1060 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1062 if (JUMP_P (insn))
1064 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1065 break;
1067 else if (CALL_P (insn))
1068 break;
1071 return const0_rtx;
1074 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1075 (not all will be used on all machines) that was passed to __builtin_setjmp.
1076 It updates the stack pointer in that block to correspond to the current
1077 stack pointer. */
1079 static void
1080 expand_builtin_update_setjmp_buf (rtx buf_addr)
1082 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1083 rtx stack_save
1084 = gen_rtx_MEM (sa_mode,
1085 memory_address
1086 (sa_mode,
1087 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1089 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1092 /* Expand a call to __builtin_prefetch. For a target that does not support
1093 data prefetch, evaluate the memory address argument in case it has side
1094 effects. */
1096 static void
1097 expand_builtin_prefetch (tree exp)
1099 tree arg0, arg1, arg2;
1100 int nargs;
1101 rtx op0, op1, op2;
1103 if (!validate_arglist (exp, POINTER_TYPE, 0))
1104 return;
1106 arg0 = CALL_EXPR_ARG (exp, 0);
1108 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1109 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1110 locality). */
1111 nargs = call_expr_nargs (exp);
1112 if (nargs > 1)
1113 arg1 = CALL_EXPR_ARG (exp, 1);
1114 else
1115 arg1 = integer_zero_node;
1116 if (nargs > 2)
1117 arg2 = CALL_EXPR_ARG (exp, 2);
1118 else
1119 arg2 = integer_three_node;
1121 /* Argument 0 is an address. */
1122 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1124 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1125 if (TREE_CODE (arg1) != INTEGER_CST)
1127 error ("second argument to %<__builtin_prefetch%> must be a constant");
1128 arg1 = integer_zero_node;
1130 op1 = expand_normal (arg1);
1131 /* Argument 1 must be either zero or one. */
1132 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1134 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1135 " using zero");
1136 op1 = const0_rtx;
1139 /* Argument 2 (locality) must be a compile-time constant int. */
1140 if (TREE_CODE (arg2) != INTEGER_CST)
1142 error ("third argument to %<__builtin_prefetch%> must be a constant");
1143 arg2 = integer_zero_node;
1145 op2 = expand_normal (arg2);
1146 /* Argument 2 must be 0, 1, 2, or 3. */
1147 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1149 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1150 op2 = const0_rtx;
1153 #ifdef HAVE_prefetch
1154 if (HAVE_prefetch)
1156 struct expand_operand ops[3];
1158 create_address_operand (&ops[0], op0);
1159 create_integer_operand (&ops[1], INTVAL (op1));
1160 create_integer_operand (&ops[2], INTVAL (op2));
1161 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1162 return;
1164 #endif
1166 /* Don't do anything with direct references to volatile memory, but
1167 generate code to handle other side effects. */
1168 if (!MEM_P (op0) && side_effects_p (op0))
1169 emit_insn (op0);
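/* Illustrative usage of __builtin_prefetch as expanded above (a sketch,
   not part of this file; the function and parameter names are made up).
   The second and third arguments must be integer constants: 0/1 for
   read/write and 0-3 for locality, as checked above.  */
#if 0
static void
scale_array (double *dst, const double *src, int n)
{
  int i;

  for (i = 0; i < n; i++)
    {
      /* Read prefetch with maximal temporal locality, a few iterations
	 ahead of the current element.  */
      __builtin_prefetch (&src[i + 16], 0, 3);
      dst[i] = src[i] * 2.0;
    }
}
#endif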
1172 /* Get a MEM rtx for expression EXP which is the address of an operand
1173 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1174 the maximum length of the block of memory that might be accessed or
1175 NULL if unknown. */
1177 static rtx
1178 get_memory_rtx (tree exp, tree len)
1180 tree orig_exp = exp;
1181 rtx addr, mem;
1182 HOST_WIDE_INT off;
1184 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
1185 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1186 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1187 exp = TREE_OPERAND (exp, 0);
1189 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1190 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1192 /* Get an expression we can use to find the attributes to assign to MEM.
1193 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1194 we can. First remove any nops. */
1195 while (CONVERT_EXPR_P (exp)
1196 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1197 exp = TREE_OPERAND (exp, 0);
1199 off = 0;
1200 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1201 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1202 && host_integerp (TREE_OPERAND (exp, 1), 0)
1203 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1204 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1205 else if (TREE_CODE (exp) == ADDR_EXPR)
1206 exp = TREE_OPERAND (exp, 0);
1207 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1208 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1209 else
1210 exp = NULL;
1212 /* Honor attributes derived from exp, except for the alias set
1213 (as builtin stringops may alias with anything) and the size
1214 (as stringops may access multiple array elements). */
1215 if (exp)
1217 set_mem_attributes (mem, exp, 0);
1219 if (off)
1220 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1222 /* Allow the string and memory builtins to overflow from one
1223 field into another, see http://gcc.gnu.org/PR23561.
1224 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1225 memory accessed by the string or memory builtin will fit
1226 within the field. */
1227 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1229 tree mem_expr = MEM_EXPR (mem);
1230 HOST_WIDE_INT offset = -1, length = -1;
1231 tree inner = exp;
1233 while (TREE_CODE (inner) == ARRAY_REF
1234 || CONVERT_EXPR_P (inner)
1235 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1236 || TREE_CODE (inner) == SAVE_EXPR)
1237 inner = TREE_OPERAND (inner, 0);
1239 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1241 if (MEM_OFFSET (mem)
1242 && CONST_INT_P (MEM_OFFSET (mem)))
1243 offset = INTVAL (MEM_OFFSET (mem));
1245 if (offset >= 0 && len && host_integerp (len, 0))
1246 length = tree_low_cst (len, 0);
1248 while (TREE_CODE (inner) == COMPONENT_REF)
1250 tree field = TREE_OPERAND (inner, 1);
1251 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1252 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1254 /* Bitfields are generally not byte-addressable. */
1255 gcc_assert (!DECL_BIT_FIELD (field)
1256 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1257 % BITS_PER_UNIT) == 0
1258 && host_integerp (DECL_SIZE (field), 0)
1259 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1260 % BITS_PER_UNIT) == 0));
1262 /* If we can prove that the memory starting at XEXP (mem, 0) and
1263 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1264 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1265 fields without DECL_SIZE_UNIT like flexible array members. */
1266 if (length >= 0
1267 && DECL_SIZE_UNIT (field)
1268 && host_integerp (DECL_SIZE_UNIT (field), 0))
1270 HOST_WIDE_INT size
1271 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1272 if (offset <= size
1273 && length <= size
1274 && offset + length <= size)
1275 break;
1278 if (offset >= 0
1279 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1280 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1281 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1282 / BITS_PER_UNIT;
1283 else
1285 offset = -1;
1286 length = -1;
1289 mem_expr = TREE_OPERAND (mem_expr, 0);
1290 inner = TREE_OPERAND (inner, 0);
1293 if (mem_expr == NULL)
1294 offset = -1;
1295 if (mem_expr != MEM_EXPR (mem))
1297 set_mem_expr (mem, mem_expr);
1298 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1301 set_mem_alias_set (mem, 0);
1302 set_mem_size (mem, NULL_RTX);
1305 return mem;
1308 /* Built-in functions to perform an untyped call and return. */
1310 #define apply_args_mode \
1311 (this_target_builtins->x_apply_args_mode)
1312 #define apply_result_mode \
1313 (this_target_builtins->x_apply_result_mode)
1315 /* Return the size required for the block returned by __builtin_apply_args,
1316 and initialize apply_args_mode. */
1318 static int
1319 apply_args_size (void)
1321 static int size = -1;
1322 int align;
1323 unsigned int regno;
1324 enum machine_mode mode;
1326 /* The values computed by this function never change. */
1327 if (size < 0)
1329 /* The first value is the incoming arg-pointer. */
1330 size = GET_MODE_SIZE (Pmode);
1332 /* The second value is the structure value address unless this is
1333 passed as an "invisible" first argument. */
1334 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1335 size += GET_MODE_SIZE (Pmode);
1337 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1338 if (FUNCTION_ARG_REGNO_P (regno))
1340 mode = targetm.calls.get_raw_arg_mode (regno);
1342 gcc_assert (mode != VOIDmode);
1344 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1345 if (size % align != 0)
1346 size = CEIL (size, align) * align;
1347 size += GET_MODE_SIZE (mode);
1348 apply_args_mode[regno] = mode;
1350 else
1352 apply_args_mode[regno] = VOIDmode;
1355 return size;
1358 /* Return the size required for the block returned by __builtin_apply,
1359 and initialize apply_result_mode. */
1361 static int
1362 apply_result_size (void)
1364 static int size = -1;
1365 int align, regno;
1366 enum machine_mode mode;
1368 /* The values computed by this function never change. */
1369 if (size < 0)
1371 size = 0;
1373 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1374 if (targetm.calls.function_value_regno_p (regno))
1376 mode = targetm.calls.get_raw_result_mode (regno);
1378 gcc_assert (mode != VOIDmode);
1380 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1381 if (size % align != 0)
1382 size = CEIL (size, align) * align;
1383 size += GET_MODE_SIZE (mode);
1384 apply_result_mode[regno] = mode;
1386 else
1387 apply_result_mode[regno] = VOIDmode;
1389 /* Allow targets that use untyped_call and untyped_return to override
1390 the size so that machine-specific information can be stored here. */
1391 #ifdef APPLY_RESULT_SIZE
1392 size = APPLY_RESULT_SIZE;
1393 #endif
1395 return size;
1398 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1399 /* Create a vector describing the result block RESULT. If SAVEP is true,
1400 the result block is used to save the values; otherwise it is used to
1401 restore the values. */
1403 static rtx
1404 result_vector (int savep, rtx result)
1406 int regno, size, align, nelts;
1407 enum machine_mode mode;
1408 rtx reg, mem;
1409 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1411 size = nelts = 0;
1412 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1413 if ((mode = apply_result_mode[regno]) != VOIDmode)
1415 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1416 if (size % align != 0)
1417 size = CEIL (size, align) * align;
1418 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1419 mem = adjust_address (result, mode, size);
1420 savevec[nelts++] = (savep
1421 ? gen_rtx_SET (VOIDmode, mem, reg)
1422 : gen_rtx_SET (VOIDmode, reg, mem));
1423 size += GET_MODE_SIZE (mode);
1425 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1427 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1429 /* Save the state required to perform an untyped call with the same
1430 arguments as were passed to the current function. */
1432 static rtx
1433 expand_builtin_apply_args_1 (void)
1435 rtx registers, tem;
1436 int size, align, regno;
1437 enum machine_mode mode;
1438 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1440 /* Create a block where the arg-pointer, structure value address,
1441 and argument registers can be saved. */
1442 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1444 /* Walk past the arg-pointer and structure value address. */
1445 size = GET_MODE_SIZE (Pmode);
1446 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1447 size += GET_MODE_SIZE (Pmode);
1449 /* Save each register used in calling a function to the block. */
1450 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1451 if ((mode = apply_args_mode[regno]) != VOIDmode)
1453 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1454 if (size % align != 0)
1455 size = CEIL (size, align) * align;
1457 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1459 emit_move_insn (adjust_address (registers, mode, size), tem);
1460 size += GET_MODE_SIZE (mode);
1463 /* Save the arg pointer to the block. */
1464 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1465 #ifdef STACK_GROWS_DOWNWARD
1466 /* We need the pointer as the caller actually passed the arguments to us,
1467 not as we might have pretended they were passed. Make sure it's a valid
1468 operand, as emit_move_insn isn't expected to handle a PLUS. */
1469 tem
1470 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1471 NULL_RTX);
1472 #endif
1473 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1475 size = GET_MODE_SIZE (Pmode);
1477 /* Save the structure value address unless this is passed as an
1478 "invisible" first argument. */
1479 if (struct_incoming_value)
1481 emit_move_insn (adjust_address (registers, Pmode, size),
1482 copy_to_reg (struct_incoming_value));
1483 size += GET_MODE_SIZE (Pmode);
1486 /* Return the address of the block. */
1487 return copy_addr_to_reg (XEXP (registers, 0));
1490 /* __builtin_apply_args returns a block of memory allocated on
1491 the stack into which are stored the arg pointer, structure
1492 value address, static chain, and all the registers that might
1493 possibly be used in performing a function call. The code is
1494 moved to the start of the function so the incoming values are
1495 saved. */
1497 static rtx
1498 expand_builtin_apply_args (void)
1500 /* Don't do __builtin_apply_args more than once in a function.
1501 Save the result of the first call and reuse it. */
1502 if (apply_args_value != 0)
1503 return apply_args_value;
1505 /* When this function is called, it means that registers must be
1506 saved on entry to this function. So we migrate the
1507 call to the first insn of this function. */
1508 rtx temp;
1509 rtx seq;
1511 start_sequence ();
1512 temp = expand_builtin_apply_args_1 ();
1513 seq = get_insns ();
1514 end_sequence ();
1516 apply_args_value = temp;
1518 /* Put the insns after the NOTE that starts the function.
1519 If this is inside a start_sequence, make the outer-level insn
1520 chain current, so the code is placed at the start of the
1521 function. If internal_arg_pointer is a non-virtual pseudo,
1522 it needs to be placed after the function that initializes
1523 that pseudo. */
1524 push_topmost_sequence ();
1525 if (REG_P (crtl->args.internal_arg_pointer)
1526 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1527 emit_insn_before (seq, parm_birth_insn);
1528 else
1529 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1530 pop_topmost_sequence ();
1531 return temp;
1535 /* Perform an untyped call and save the state required to perform an
1536 untyped return of whatever value was returned by the given function. */
1538 static rtx
1539 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1541 int size, align, regno;
1542 enum machine_mode mode;
1543 rtx incoming_args, result, reg, dest, src, call_insn;
1544 rtx old_stack_level = 0;
1545 rtx call_fusage = 0;
1546 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1548 arguments = convert_memory_address (Pmode, arguments);
1550 /* Create a block where the return registers can be saved. */
1551 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1553 /* Fetch the arg pointer from the ARGUMENTS block. */
1554 incoming_args = gen_reg_rtx (Pmode);
1555 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1556 #ifndef STACK_GROWS_DOWNWARD
1557 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1558 incoming_args, 0, OPTAB_LIB_WIDEN);
1559 #endif
1561 /* Push a new argument block and copy the arguments. Do not allow
1562 the (potential) memcpy call below to interfere with our stack
1563 manipulations. */
1564 do_pending_stack_adjust ();
1565 NO_DEFER_POP;
1567 /* Save the stack with nonlocal if available. */
1568 #ifdef HAVE_save_stack_nonlocal
1569 if (HAVE_save_stack_nonlocal)
1570 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1571 else
1572 #endif
1573 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1575 /* Allocate a block of memory onto the stack and copy the memory
1576 arguments to the outgoing arguments address. We can pass TRUE
1577 as the 4th argument because we just saved the stack pointer
1578 and will restore it right after the call. */
1579 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1581 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1582 may have already set current_function_calls_alloca to true.
1583 current_function_calls_alloca won't be set if argsize is zero,
1584 so we have to guarantee need_drap is true here. */
1585 if (SUPPORTS_STACK_ALIGNMENT)
1586 crtl->need_drap = true;
1588 dest = virtual_outgoing_args_rtx;
1589 #ifndef STACK_GROWS_DOWNWARD
1590 if (CONST_INT_P (argsize))
1591 dest = plus_constant (dest, -INTVAL (argsize));
1592 else
1593 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1594 #endif
1595 dest = gen_rtx_MEM (BLKmode, dest);
1596 set_mem_align (dest, PARM_BOUNDARY);
1597 src = gen_rtx_MEM (BLKmode, incoming_args);
1598 set_mem_align (src, PARM_BOUNDARY);
1599 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1601 /* Refer to the argument block. */
1602 apply_args_size ();
1603 arguments = gen_rtx_MEM (BLKmode, arguments);
1604 set_mem_align (arguments, PARM_BOUNDARY);
1606 /* Walk past the arg-pointer and structure value address. */
1607 size = GET_MODE_SIZE (Pmode);
1608 if (struct_value)
1609 size += GET_MODE_SIZE (Pmode);
1611 /* Restore each of the registers previously saved. Make USE insns
1612 for each of these registers for use in making the call. */
1613 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1614 if ((mode = apply_args_mode[regno]) != VOIDmode)
1616 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1617 if (size % align != 0)
1618 size = CEIL (size, align) * align;
1619 reg = gen_rtx_REG (mode, regno);
1620 emit_move_insn (reg, adjust_address (arguments, mode, size));
1621 use_reg (&call_fusage, reg);
1622 size += GET_MODE_SIZE (mode);
1625 /* Restore the structure value address unless this is passed as an
1626 "invisible" first argument. */
1627 size = GET_MODE_SIZE (Pmode);
1628 if (struct_value)
1630 rtx value = gen_reg_rtx (Pmode);
1631 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1632 emit_move_insn (struct_value, value);
1633 if (REG_P (struct_value))
1634 use_reg (&call_fusage, struct_value);
1635 size += GET_MODE_SIZE (Pmode);
1638 /* All arguments and registers used for the call are set up by now! */
1639 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1641 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1642 and we don't want to load it into a register as an optimization,
1643 because prepare_call_address already did it if it should be done. */
1644 if (GET_CODE (function) != SYMBOL_REF)
1645 function = memory_address (FUNCTION_MODE, function);
1647 /* Generate the actual call instruction and save the return value. */
1648 #ifdef HAVE_untyped_call
1649 if (HAVE_untyped_call)
1650 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1651 result, result_vector (1, result)));
1652 else
1653 #endif
1654 #ifdef HAVE_call_value
1655 if (HAVE_call_value)
1657 rtx valreg = 0;
1659 /* Locate the unique return register. It is not possible to
1660 express a call that sets more than one return register using
1661 call_value; use untyped_call for that. In fact, untyped_call
1662 only needs to save the return registers in the given block. */
1663 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1664 if ((mode = apply_result_mode[regno]) != VOIDmode)
1666 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1668 valreg = gen_rtx_REG (mode, regno);
1671 emit_call_insn (GEN_CALL_VALUE (valreg,
1672 gen_rtx_MEM (FUNCTION_MODE, function),
1673 const0_rtx, NULL_RTX, const0_rtx));
1675 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1677 else
1678 #endif
1679 gcc_unreachable ();
1681 /* Find the CALL insn we just emitted, and attach the register usage
1682 information. */
1683 call_insn = last_call_insn ();
1684 add_function_usage_to (call_insn, call_fusage);
1686 /* Restore the stack. */
1687 #ifdef HAVE_save_stack_nonlocal
1688 if (HAVE_save_stack_nonlocal)
1689 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1690 else
1691 #endif
1692 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1694 OK_DEFER_POP;
1696 /* Return the address of the result block. */
1697 result = copy_addr_to_reg (XEXP (result, 0));
1698 return convert_memory_address (ptr_mode, result);
1701 /* Perform an untyped return. */
1703 static void
1704 expand_builtin_return (rtx result)
1706 int size, align, regno;
1707 enum machine_mode mode;
1708 rtx reg;
1709 rtx call_fusage = 0;
1711 result = convert_memory_address (Pmode, result);
1713 apply_result_size ();
1714 result = gen_rtx_MEM (BLKmode, result);
1716 #ifdef HAVE_untyped_return
1717 if (HAVE_untyped_return)
1719 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1720 emit_barrier ();
1721 return;
1723 #endif
1725 /* Restore the return value and note that each value is used. */
1726 size = 0;
1727 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1728 if ((mode = apply_result_mode[regno]) != VOIDmode)
1730 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1731 if (size % align != 0)
1732 size = CEIL (size, align) * align;
1733 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1734 emit_move_insn (reg, adjust_address (result, mode, size));
1736 push_to_sequence (call_fusage);
1737 emit_use (reg);
1738 call_fusage = get_insns ();
1739 end_sequence ();
1740 size += GET_MODE_SIZE (mode);
1743 /* Put the USE insns before the return. */
1744 emit_insn (call_fusage);
1746 /* Return whatever values were restored by jumping directly to the end
1747 of the function. */
1748 expand_naked_return ();
1751 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1753 static enum type_class
1754 type_to_class (tree type)
1756 switch (TREE_CODE (type))
1758 case VOID_TYPE: return void_type_class;
1759 case INTEGER_TYPE: return integer_type_class;
1760 case ENUMERAL_TYPE: return enumeral_type_class;
1761 case BOOLEAN_TYPE: return boolean_type_class;
1762 case POINTER_TYPE: return pointer_type_class;
1763 case REFERENCE_TYPE: return reference_type_class;
1764 case OFFSET_TYPE: return offset_type_class;
1765 case REAL_TYPE: return real_type_class;
1766 case COMPLEX_TYPE: return complex_type_class;
1767 case FUNCTION_TYPE: return function_type_class;
1768 case METHOD_TYPE: return method_type_class;
1769 case RECORD_TYPE: return record_type_class;
1770 case UNION_TYPE:
1771 case QUAL_UNION_TYPE: return union_type_class;
1772 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1773 ? string_type_class : array_type_class);
1774 case LANG_TYPE: return lang_type_class;
1775 default: return no_type_class;
1779 /* Expand a call EXP to __builtin_classify_type. */
1781 static rtx
1782 expand_builtin_classify_type (tree exp)
1784 if (call_expr_nargs (exp))
1785 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1786 return GEN_INT (no_type_class);
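/* For illustration, the user-visible effect of the expansion above is
   roughly:

     __builtin_classify_type (1)          -> integer_type_class
     __builtin_classify_type (1.0)        -> real_type_class
     __builtin_classify_type ((void *) 0) -> pointer_type_class

   i.e. the builtin folds to the type_class value of the (promoted)
   argument type.  */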
1789 /* This helper macro, meant to be used in mathfn_built_in below,
1790 determines which among a set of three builtin math functions is
1791 appropriate for a given type mode. The `F' and `L' cases are
1792 automatically generated from the `double' case. */
1793 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1794 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1795 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1796 fcodel = BUILT_IN_MATHFN##L ; break;
1797 /* Similar to above, but appends _R after any F/L suffix. */
1798 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1799 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1800 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1801 fcodel = BUILT_IN_MATHFN##L_R ; break;
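/* As a concrete instance, CASE_MATHFN (BUILT_IN_ACOS) expands to

     case BUILT_IN_ACOS: case BUILT_IN_ACOSF: case BUILT_IN_ACOSL:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   so a single macro line covers the double, float and long double
   variants of each function.  */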
1803 /* Return the mathematical function equivalent to FN but operating directly
1804 on TYPE, if available. If IMPLICIT is true find the function in
1805 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1806 can't do the conversion, return zero. */
1808 static tree
1809 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1811 tree const *const fn_arr
1812 = implicit ? implicit_built_in_decls : built_in_decls;
1813 enum built_in_function fcode, fcodef, fcodel;
1815 switch (fn)
1817 CASE_MATHFN (BUILT_IN_ACOS)
1818 CASE_MATHFN (BUILT_IN_ACOSH)
1819 CASE_MATHFN (BUILT_IN_ASIN)
1820 CASE_MATHFN (BUILT_IN_ASINH)
1821 CASE_MATHFN (BUILT_IN_ATAN)
1822 CASE_MATHFN (BUILT_IN_ATAN2)
1823 CASE_MATHFN (BUILT_IN_ATANH)
1824 CASE_MATHFN (BUILT_IN_CBRT)
1825 CASE_MATHFN (BUILT_IN_CEIL)
1826 CASE_MATHFN (BUILT_IN_CEXPI)
1827 CASE_MATHFN (BUILT_IN_COPYSIGN)
1828 CASE_MATHFN (BUILT_IN_COS)
1829 CASE_MATHFN (BUILT_IN_COSH)
1830 CASE_MATHFN (BUILT_IN_DREM)
1831 CASE_MATHFN (BUILT_IN_ERF)
1832 CASE_MATHFN (BUILT_IN_ERFC)
1833 CASE_MATHFN (BUILT_IN_EXP)
1834 CASE_MATHFN (BUILT_IN_EXP10)
1835 CASE_MATHFN (BUILT_IN_EXP2)
1836 CASE_MATHFN (BUILT_IN_EXPM1)
1837 CASE_MATHFN (BUILT_IN_FABS)
1838 CASE_MATHFN (BUILT_IN_FDIM)
1839 CASE_MATHFN (BUILT_IN_FLOOR)
1840 CASE_MATHFN (BUILT_IN_FMA)
1841 CASE_MATHFN (BUILT_IN_FMAX)
1842 CASE_MATHFN (BUILT_IN_FMIN)
1843 CASE_MATHFN (BUILT_IN_FMOD)
1844 CASE_MATHFN (BUILT_IN_FREXP)
1845 CASE_MATHFN (BUILT_IN_GAMMA)
1846 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1847 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1848 CASE_MATHFN (BUILT_IN_HYPOT)
1849 CASE_MATHFN (BUILT_IN_ILOGB)
1850 CASE_MATHFN (BUILT_IN_INF)
1851 CASE_MATHFN (BUILT_IN_ISINF)
1852 CASE_MATHFN (BUILT_IN_J0)
1853 CASE_MATHFN (BUILT_IN_J1)
1854 CASE_MATHFN (BUILT_IN_JN)
1855 CASE_MATHFN (BUILT_IN_LCEIL)
1856 CASE_MATHFN (BUILT_IN_LDEXP)
1857 CASE_MATHFN (BUILT_IN_LFLOOR)
1858 CASE_MATHFN (BUILT_IN_LGAMMA)
1859 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1860 CASE_MATHFN (BUILT_IN_LLCEIL)
1861 CASE_MATHFN (BUILT_IN_LLFLOOR)
1862 CASE_MATHFN (BUILT_IN_LLRINT)
1863 CASE_MATHFN (BUILT_IN_LLROUND)
1864 CASE_MATHFN (BUILT_IN_LOG)
1865 CASE_MATHFN (BUILT_IN_LOG10)
1866 CASE_MATHFN (BUILT_IN_LOG1P)
1867 CASE_MATHFN (BUILT_IN_LOG2)
1868 CASE_MATHFN (BUILT_IN_LOGB)
1869 CASE_MATHFN (BUILT_IN_LRINT)
1870 CASE_MATHFN (BUILT_IN_LROUND)
1871 CASE_MATHFN (BUILT_IN_MODF)
1872 CASE_MATHFN (BUILT_IN_NAN)
1873 CASE_MATHFN (BUILT_IN_NANS)
1874 CASE_MATHFN (BUILT_IN_NEARBYINT)
1875 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1876 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1877 CASE_MATHFN (BUILT_IN_POW)
1878 CASE_MATHFN (BUILT_IN_POWI)
1879 CASE_MATHFN (BUILT_IN_POW10)
1880 CASE_MATHFN (BUILT_IN_REMAINDER)
1881 CASE_MATHFN (BUILT_IN_REMQUO)
1882 CASE_MATHFN (BUILT_IN_RINT)
1883 CASE_MATHFN (BUILT_IN_ROUND)
1884 CASE_MATHFN (BUILT_IN_SCALB)
1885 CASE_MATHFN (BUILT_IN_SCALBLN)
1886 CASE_MATHFN (BUILT_IN_SCALBN)
1887 CASE_MATHFN (BUILT_IN_SIGNBIT)
1888 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1889 CASE_MATHFN (BUILT_IN_SIN)
1890 CASE_MATHFN (BUILT_IN_SINCOS)
1891 CASE_MATHFN (BUILT_IN_SINH)
1892 CASE_MATHFN (BUILT_IN_SQRT)
1893 CASE_MATHFN (BUILT_IN_TAN)
1894 CASE_MATHFN (BUILT_IN_TANH)
1895 CASE_MATHFN (BUILT_IN_TGAMMA)
1896 CASE_MATHFN (BUILT_IN_TRUNC)
1897 CASE_MATHFN (BUILT_IN_Y0)
1898 CASE_MATHFN (BUILT_IN_Y1)
1899 CASE_MATHFN (BUILT_IN_YN)
1901 default:
1902 return NULL_TREE;
1905 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1906 return fn_arr[fcode];
1907 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1908 return fn_arr[fcodef];
1909 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1910 return fn_arr[fcodel];
1911 else
1912 return NULL_TREE;
1915 /* Like mathfn_built_in_1(), but always use the implicit array. */
1917 tree
1918 mathfn_built_in (tree type, enum built_in_function fn)
1920 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
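/* For example, mathfn_built_in (long_double_type_node, BUILT_IN_SIN)
   returns the implicit decl for sinl, a float type yields sinf, and a
   double type yields sin; any other type, or a builtin the runtime is
   not assumed to provide, yields NULL_TREE.  */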
1923 /* If errno must be maintained, expand the RTL to check if the result,
1924 TARGET, of a built-in function call, EXP, is NaN, and if so set
1925 errno to EDOM. */
1927 static void
1928 expand_errno_check (tree exp, rtx target)
1930 rtx lab = gen_label_rtx ();
1932 /* Test the result; if it is NaN, set errno=EDOM because
1933 the argument was not in the domain. */
1934 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1935 NULL_RTX, NULL_RTX, lab,
1936 /* The jump is very likely. */
1937 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1939 #ifdef TARGET_EDOM
1940 /* If this built-in doesn't throw an exception, set errno directly. */
1941 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1943 #ifdef GEN_ERRNO_RTX
1944 rtx errno_rtx = GEN_ERRNO_RTX;
1945 #else
1946 rtx errno_rtx
1947 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1948 #endif
1949 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1950 emit_label (lab);
1951 return;
1953 #endif
1955 /* Make sure the library call isn't expanded as a tail call. */
1956 CALL_EXPR_TAILCALL (exp) = 0;
1958 /* We can't set errno=EDOM directly; let the library call do it.
1959 Pop the arguments right away in case the call gets deleted. */
1960 NO_DEFER_POP;
1961 expand_call (exp, target, 0);
1962 OK_DEFER_POP;
1963 emit_label (lab);
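/* A rough C-level sketch of what is emitted above, relying on the fact
   that only a NaN compares unequal to itself:

     r = ...open-coded result...;
     if (!(r == r))
       errno = EDOM;

   When TARGET_EDOM is not defined, or the builtin may throw, the
   fallthrough path instead re-issues the library call so that the
   library itself sets errno.  */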
1966 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1967 Return NULL_RTX if a normal call should be emitted rather than expanding
1968 the function in-line. EXP is the expression that is a call to the builtin
1969 function; if convenient, the result should be placed in TARGET.
1970 SUBTARGET may be used as the target for computing one of EXP's operands. */
1972 static rtx
1973 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1975 optab builtin_optab;
1976 rtx op0, insns;
1977 tree fndecl = get_callee_fndecl (exp);
1978 enum machine_mode mode;
1979 bool errno_set = false;
1980 tree arg;
1982 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1983 return NULL_RTX;
1985 arg = CALL_EXPR_ARG (exp, 0);
1987 switch (DECL_FUNCTION_CODE (fndecl))
1989 CASE_FLT_FN (BUILT_IN_SQRT):
1990 errno_set = ! tree_expr_nonnegative_p (arg);
1991 builtin_optab = sqrt_optab;
1992 break;
1993 CASE_FLT_FN (BUILT_IN_EXP):
1994 errno_set = true; builtin_optab = exp_optab; break;
1995 CASE_FLT_FN (BUILT_IN_EXP10):
1996 CASE_FLT_FN (BUILT_IN_POW10):
1997 errno_set = true; builtin_optab = exp10_optab; break;
1998 CASE_FLT_FN (BUILT_IN_EXP2):
1999 errno_set = true; builtin_optab = exp2_optab; break;
2000 CASE_FLT_FN (BUILT_IN_EXPM1):
2001 errno_set = true; builtin_optab = expm1_optab; break;
2002 CASE_FLT_FN (BUILT_IN_LOGB):
2003 errno_set = true; builtin_optab = logb_optab; break;
2004 CASE_FLT_FN (BUILT_IN_LOG):
2005 errno_set = true; builtin_optab = log_optab; break;
2006 CASE_FLT_FN (BUILT_IN_LOG10):
2007 errno_set = true; builtin_optab = log10_optab; break;
2008 CASE_FLT_FN (BUILT_IN_LOG2):
2009 errno_set = true; builtin_optab = log2_optab; break;
2010 CASE_FLT_FN (BUILT_IN_LOG1P):
2011 errno_set = true; builtin_optab = log1p_optab; break;
2012 CASE_FLT_FN (BUILT_IN_ASIN):
2013 builtin_optab = asin_optab; break;
2014 CASE_FLT_FN (BUILT_IN_ACOS):
2015 builtin_optab = acos_optab; break;
2016 CASE_FLT_FN (BUILT_IN_TAN):
2017 builtin_optab = tan_optab; break;
2018 CASE_FLT_FN (BUILT_IN_ATAN):
2019 builtin_optab = atan_optab; break;
2020 CASE_FLT_FN (BUILT_IN_FLOOR):
2021 builtin_optab = floor_optab; break;
2022 CASE_FLT_FN (BUILT_IN_CEIL):
2023 builtin_optab = ceil_optab; break;
2024 CASE_FLT_FN (BUILT_IN_TRUNC):
2025 builtin_optab = btrunc_optab; break;
2026 CASE_FLT_FN (BUILT_IN_ROUND):
2027 builtin_optab = round_optab; break;
2028 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2029 builtin_optab = nearbyint_optab;
2030 if (flag_trapping_math)
2031 break;
2032 /* Else fall through and expand as rint. */
2033 CASE_FLT_FN (BUILT_IN_RINT):
2034 builtin_optab = rint_optab; break;
2035 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2036 builtin_optab = significand_optab; break;
2037 default:
2038 gcc_unreachable ();
2041 /* Make a suitable register to place result in. */
2042 mode = TYPE_MODE (TREE_TYPE (exp));
2044 if (! flag_errno_math || ! HONOR_NANS (mode))
2045 errno_set = false;
2047 /* Before working hard, check whether the instruction is available. */
2048 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2049 && (!errno_set || !optimize_insn_for_size_p ()))
2051 target = gen_reg_rtx (mode);
2053 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2054 need to expand the argument again. This way, we will not perform
2055 side-effects more than once. */
2056 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2058 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2060 start_sequence ();
2062 /* Compute into TARGET.
2063 Set TARGET to wherever the result comes back. */
2064 target = expand_unop (mode, builtin_optab, op0, target, 0);
2066 if (target != 0)
2068 if (errno_set)
2069 expand_errno_check (exp, target);
2071 /* Output the entire sequence. */
2072 insns = get_insns ();
2073 end_sequence ();
2074 emit_insn (insns);
2075 return target;
2078 /* If we were unable to expand via the builtin, stop the sequence
2079 (without outputting the insns) and call the library function
2080 with the stabilized argument list. */
2081 end_sequence ();
2084 return expand_call (exp, target, target == const0_rtx);
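/* For instance, a call such as

     y = __builtin_sqrt (x);

   expands directly through sqrt_optab when the target provides a square
   root pattern.  With -fno-math-errno (or a provably nonnegative
   argument) that is all; otherwise the open-coded result is followed by
   the expand_errno_check sequence, unless we are optimizing for size, in
   which case the plain library call is preferred.  */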
2087 /* Expand a call to the builtin binary math functions (pow and atan2).
2088 Return NULL_RTX if a normal call should be emitted rather than expanding the
2089 function in-line. EXP is the expression that is a call to the builtin
2090 function; if convenient, the result should be placed in TARGET.
2091 SUBTARGET may be used as the target for computing one of EXP's
2092 operands. */
2094 static rtx
2095 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2097 optab builtin_optab;
2098 rtx op0, op1, insns;
2099 int op1_type = REAL_TYPE;
2100 tree fndecl = get_callee_fndecl (exp);
2101 tree arg0, arg1;
2102 enum machine_mode mode;
2103 bool errno_set = true;
2105 switch (DECL_FUNCTION_CODE (fndecl))
2107 CASE_FLT_FN (BUILT_IN_SCALBN):
2108 CASE_FLT_FN (BUILT_IN_SCALBLN):
2109 CASE_FLT_FN (BUILT_IN_LDEXP):
2110 op1_type = INTEGER_TYPE;
2111 default:
2112 break;
2115 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2116 return NULL_RTX;
2118 arg0 = CALL_EXPR_ARG (exp, 0);
2119 arg1 = CALL_EXPR_ARG (exp, 1);
2121 switch (DECL_FUNCTION_CODE (fndecl))
2123 CASE_FLT_FN (BUILT_IN_POW):
2124 builtin_optab = pow_optab; break;
2125 CASE_FLT_FN (BUILT_IN_ATAN2):
2126 builtin_optab = atan2_optab; break;
2127 CASE_FLT_FN (BUILT_IN_SCALB):
2128 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2129 return 0;
2130 builtin_optab = scalb_optab; break;
2131 CASE_FLT_FN (BUILT_IN_SCALBN):
2132 CASE_FLT_FN (BUILT_IN_SCALBLN):
2133 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2134 return 0;
2135 /* Fall through... */
2136 CASE_FLT_FN (BUILT_IN_LDEXP):
2137 builtin_optab = ldexp_optab; break;
2138 CASE_FLT_FN (BUILT_IN_FMOD):
2139 builtin_optab = fmod_optab; break;
2140 CASE_FLT_FN (BUILT_IN_REMAINDER):
2141 CASE_FLT_FN (BUILT_IN_DREM):
2142 builtin_optab = remainder_optab; break;
2143 default:
2144 gcc_unreachable ();
2147 /* Make a suitable register to place result in. */
2148 mode = TYPE_MODE (TREE_TYPE (exp));
2150 /* Before working hard, check whether the instruction is available. */
2151 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2152 return NULL_RTX;
2154 target = gen_reg_rtx (mode);
2156 if (! flag_errno_math || ! HONOR_NANS (mode))
2157 errno_set = false;
2159 if (errno_set && optimize_insn_for_size_p ())
2160 return 0;
2162 /* Always stabilize the argument list. */
2163 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2164 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2166 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2167 op1 = expand_normal (arg1);
2169 start_sequence ();
2171 /* Compute into TARGET.
2172 Set TARGET to wherever the result comes back. */
2173 target = expand_binop (mode, builtin_optab, op0, op1,
2174 target, 0, OPTAB_DIRECT);
2176 /* If we were unable to expand via the builtin, stop the sequence
2177 (without outputting the insns) and call the library function
2178 with the stabilized argument list. */
2179 if (target == 0)
2181 end_sequence ();
2182 return expand_call (exp, target, target == const0_rtx);
2185 if (errno_set)
2186 expand_errno_check (exp, target);
2188 /* Output the entire sequence. */
2189 insns = get_insns ();
2190 end_sequence ();
2191 emit_insn (insns);
2193 return target;
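/* As an example of the radix check above: ldexp (x, n) scales by a power
   of 2 regardless of the floating point format and always maps to
   ldexp_optab, whereas scalbn/scalbln scale by a power of FLT_RADIX and
   are only routed to ldexp_optab when the mode's format has radix 2;
   otherwise we return 0 and a normal library call is emitted.  */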
2196 /* Expand a call to the builtin ternary math functions (fma).
2197 Return NULL_RTX if a normal call should be emitted rather than expanding the
2198 function in-line. EXP is the expression that is a call to the builtin
2199 function; if convenient, the result should be placed in TARGET.
2200 SUBTARGET may be used as the target for computing one of EXP's
2201 operands. */
2203 static rtx
2204 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2206 optab builtin_optab;
2207 rtx op0, op1, op2, insns;
2208 tree fndecl = get_callee_fndecl (exp);
2209 tree arg0, arg1, arg2;
2210 enum machine_mode mode;
2212 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2213 return NULL_RTX;
2215 arg0 = CALL_EXPR_ARG (exp, 0);
2216 arg1 = CALL_EXPR_ARG (exp, 1);
2217 arg2 = CALL_EXPR_ARG (exp, 2);
2219 switch (DECL_FUNCTION_CODE (fndecl))
2221 CASE_FLT_FN (BUILT_IN_FMA):
2222 builtin_optab = fma_optab; break;
2223 default:
2224 gcc_unreachable ();
2227 /* Make a suitable register to place result in. */
2228 mode = TYPE_MODE (TREE_TYPE (exp));
2230 /* Before working hard, check whether the instruction is available. */
2231 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2232 return NULL_RTX;
2234 target = gen_reg_rtx (mode);
2236 /* Always stabilize the argument list. */
2237 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2238 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2239 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2241 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2242 op1 = expand_normal (arg1);
2243 op2 = expand_normal (arg2);
2245 start_sequence ();
2247 /* Compute into TARGET.
2248 Set TARGET to wherever the result comes back. */
2249 target = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2250 target, 0);
2252 /* If we were unable to expand via the builtin, stop the sequence
2253 (without outputting the insns) and call the library function
2254 with the stabilized argument list. */
2255 if (target == 0)
2257 end_sequence ();
2258 return expand_call (exp, target, target == const0_rtx);
2261 /* Output the entire sequence. */
2262 insns = get_insns ();
2263 end_sequence ();
2264 emit_insn (insns);
2266 return target;
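/* For illustration: fma (x, y, z) computes x * y + z with a single
   rounding at the end, so it is not equivalent to a separate multiply
   and add.  That is why the expansion above only goes through fma_optab
   when the target has a genuine fused multiply-add pattern, and
   otherwise leaves the call to the fma library function.  */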
2269 /* Expand a call to the builtin sin and cos math functions.
2270 Return NULL_RTX if a normal call should be emitted rather than expanding the
2271 function in-line. EXP is the expression that is a call to the builtin
2272 function; if convenient, the result should be placed in TARGET.
2273 SUBTARGET may be used as the target for computing one of EXP's
2274 operands. */
2276 static rtx
2277 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2279 optab builtin_optab;
2280 rtx op0, insns;
2281 tree fndecl = get_callee_fndecl (exp);
2282 enum machine_mode mode;
2283 tree arg;
2285 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2286 return NULL_RTX;
2288 arg = CALL_EXPR_ARG (exp, 0);
2290 switch (DECL_FUNCTION_CODE (fndecl))
2292 CASE_FLT_FN (BUILT_IN_SIN):
2293 CASE_FLT_FN (BUILT_IN_COS):
2294 builtin_optab = sincos_optab; break;
2295 default:
2296 gcc_unreachable ();
2299 /* Make a suitable register to place result in. */
2300 mode = TYPE_MODE (TREE_TYPE (exp));
2302 /* Check if sincos insn is available, otherwise fall back
2303 to sin or cos insn. */
2304 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2305 switch (DECL_FUNCTION_CODE (fndecl))
2307 CASE_FLT_FN (BUILT_IN_SIN):
2308 builtin_optab = sin_optab; break;
2309 CASE_FLT_FN (BUILT_IN_COS):
2310 builtin_optab = cos_optab; break;
2311 default:
2312 gcc_unreachable ();
2315 /* Before working hard, check whether the instruction is available. */
2316 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2318 target = gen_reg_rtx (mode);
2320 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2321 need to expand the argument again. This way, we will not perform
2322 side-effects more than once. */
2323 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2325 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2327 start_sequence ();
2329 /* Compute into TARGET.
2330 Set TARGET to wherever the result comes back. */
2331 if (builtin_optab == sincos_optab)
2333 int result;
2335 switch (DECL_FUNCTION_CODE (fndecl))
2337 CASE_FLT_FN (BUILT_IN_SIN):
2338 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2339 break;
2340 CASE_FLT_FN (BUILT_IN_COS):
2341 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2342 break;
2343 default:
2344 gcc_unreachable ();
2346 gcc_assert (result);
2348 else
2350 target = expand_unop (mode, builtin_optab, op0, target, 0);
2353 if (target != 0)
2355 /* Output the entire sequence. */
2356 insns = get_insns ();
2357 end_sequence ();
2358 emit_insn (insns);
2359 return target;
2362 /* If we were unable to expand via the builtin, stop the sequence
2363 (without outputting the insns) and call the library function
2364 with the stabilized argument list. */
2365 end_sequence ();
2368 target = expand_call (exp, target, target == const0_rtx);
2370 return target;
2373 /* Given an interclass math builtin decl FNDECL and its argument ARG
2374 return an RTL instruction code that implements the functionality.
2375 If that isn't possible or available return CODE_FOR_nothing. */
2377 static enum insn_code
2378 interclass_mathfn_icode (tree arg, tree fndecl)
2380 bool errno_set = false;
2381 optab builtin_optab = 0;
2382 enum machine_mode mode;
2384 switch (DECL_FUNCTION_CODE (fndecl))
2386 CASE_FLT_FN (BUILT_IN_ILOGB):
2387 errno_set = true; builtin_optab = ilogb_optab; break;
2388 CASE_FLT_FN (BUILT_IN_ISINF):
2389 builtin_optab = isinf_optab; break;
2390 case BUILT_IN_ISNORMAL:
2391 case BUILT_IN_ISFINITE:
2392 CASE_FLT_FN (BUILT_IN_FINITE):
2393 case BUILT_IN_FINITED32:
2394 case BUILT_IN_FINITED64:
2395 case BUILT_IN_FINITED128:
2396 case BUILT_IN_ISINFD32:
2397 case BUILT_IN_ISINFD64:
2398 case BUILT_IN_ISINFD128:
2399 /* These builtins have no optabs (yet). */
2400 break;
2401 default:
2402 gcc_unreachable ();
2405 /* There's no easy way to detect the case we need to set EDOM. */
2406 if (flag_errno_math && errno_set)
2407 return CODE_FOR_nothing;
2409 /* Optab mode depends on the mode of the input argument. */
2410 mode = TYPE_MODE (TREE_TYPE (arg));
2412 if (builtin_optab)
2413 return optab_handler (builtin_optab, mode);
2414 return CODE_FOR_nothing;
2417 /* Expand a call to one of the builtin math functions that operate on
2418 a floating point argument and output an integer result (ilogb, isinf,
2419 isnan, etc).
2420 Return 0 if a normal call should be emitted rather than expanding the
2421 function in-line. EXP is the expression that is a call to the builtin
2422 function; if convenient, the result should be placed in TARGET. */
2424 static rtx
2425 expand_builtin_interclass_mathfn (tree exp, rtx target)
2427 enum insn_code icode = CODE_FOR_nothing;
2428 rtx op0;
2429 tree fndecl = get_callee_fndecl (exp);
2430 enum machine_mode mode;
2431 tree arg;
2433 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2434 return NULL_RTX;
2436 arg = CALL_EXPR_ARG (exp, 0);
2437 icode = interclass_mathfn_icode (arg, fndecl);
2438 mode = TYPE_MODE (TREE_TYPE (arg));
2440 if (icode != CODE_FOR_nothing)
2442 struct expand_operand ops[1];
2443 rtx last = get_last_insn ();
2444 tree orig_arg = arg;
2446 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2447 need to expand the argument again. This way, we will not perform
2448 side-effects more than once. */
2449 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2451 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2453 if (mode != GET_MODE (op0))
2454 op0 = convert_to_mode (mode, op0, 0);
2456 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2457 if (maybe_legitimize_operands (icode, 0, 1, ops)
2458 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2459 return ops[0].value;
2461 delete_insns_since (last);
2462 CALL_EXPR_ARG (exp, 0) = orig_arg;
2465 return NULL_RTX;
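/* For example, in

     int f (double x) { return __builtin_isinf (x); }

   the optab is looked up on the mode of the floating point argument
   (DFmode here) while the output operand uses the call's integer result
   mode; if the target has no such pattern we return NULL_RTX and the
   caller emits a normal call.  */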
2468 /* Expand a call to the builtin sincos math function.
2469 Return NULL_RTX if a normal call should be emitted rather than expanding the
2470 function in-line. EXP is the expression that is a call to the builtin
2471 function. */
2473 static rtx
2474 expand_builtin_sincos (tree exp)
2476 rtx op0, op1, op2, target1, target2;
2477 enum machine_mode mode;
2478 tree arg, sinp, cosp;
2479 int result;
2480 location_t loc = EXPR_LOCATION (exp);
2481 tree alias_type, alias_off;
2483 if (!validate_arglist (exp, REAL_TYPE,
2484 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2485 return NULL_RTX;
2487 arg = CALL_EXPR_ARG (exp, 0);
2488 sinp = CALL_EXPR_ARG (exp, 1);
2489 cosp = CALL_EXPR_ARG (exp, 2);
2491 /* Make a suitable register to place result in. */
2492 mode = TYPE_MODE (TREE_TYPE (arg));
2494 /* Check if sincos insn is available, otherwise emit the call. */
2495 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2496 return NULL_RTX;
2498 target1 = gen_reg_rtx (mode);
2499 target2 = gen_reg_rtx (mode);
2501 op0 = expand_normal (arg);
2502 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2503 alias_off = build_int_cst (alias_type, 0);
2504 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2505 sinp, alias_off));
2506 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2507 cosp, alias_off));
2509 /* Compute into target1 and target2.
2510 Set TARGET to wherever the result comes back. */
2511 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2512 gcc_assert (result);
2514 /* Move target1 and target2 to the memory locations indicated
2515 by op1 and op2. */
2516 emit_move_insn (op1, target1);
2517 emit_move_insn (op2, target2);
2519 return const0_rtx;
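/* At the source level this corresponds to

     sincos (x, &s, &c);

   being computed by a single two-output instruction when the target
   provides a sincos pattern: both results are produced into fresh
   registers and then stored through the two pointer arguments.  Without
   such a pattern we return NULL_RTX and the libc sincos is called.  */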
2522 /* Expand a call to the internal cexpi builtin to the sincos math function.
2523 EXP is the expression that is a call to the builtin function; if convenient,
2524 the result should be placed in TARGET. */
2526 static rtx
2527 expand_builtin_cexpi (tree exp, rtx target)
2529 tree fndecl = get_callee_fndecl (exp);
2530 tree arg, type;
2531 enum machine_mode mode;
2532 rtx op0, op1, op2;
2533 location_t loc = EXPR_LOCATION (exp);
2535 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2536 return NULL_RTX;
2538 arg = CALL_EXPR_ARG (exp, 0);
2539 type = TREE_TYPE (arg);
2540 mode = TYPE_MODE (TREE_TYPE (arg));
2542 /* Try expanding via a sincos optab, fall back to emitting a libcall
2543 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2544 is only generated from sincos, cexp or if we have either of them. */
2545 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2547 op1 = gen_reg_rtx (mode);
2548 op2 = gen_reg_rtx (mode);
2550 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2552 /* Compute into op1 and op2. */
2553 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2555 else if (TARGET_HAS_SINCOS)
2557 tree call, fn = NULL_TREE;
2558 tree top1, top2;
2559 rtx op1a, op2a;
2561 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2562 fn = built_in_decls[BUILT_IN_SINCOSF];
2563 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2564 fn = built_in_decls[BUILT_IN_SINCOS];
2565 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2566 fn = built_in_decls[BUILT_IN_SINCOSL];
2567 else
2568 gcc_unreachable ();
2570 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2571 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2572 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2573 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2574 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2575 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2577 /* Make sure not to fold the sincos call again. */
2578 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2579 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2580 call, 3, arg, top1, top2));
2582 else
2584 tree call, fn = NULL_TREE, narg;
2585 tree ctype = build_complex_type (type);
2587 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2588 fn = built_in_decls[BUILT_IN_CEXPF];
2589 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2590 fn = built_in_decls[BUILT_IN_CEXP];
2591 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2592 fn = built_in_decls[BUILT_IN_CEXPL];
2593 else
2594 gcc_unreachable ();
2596 /* If we don't have a decl for cexp, create one. This is the
2597 friendliest fallback if the user calls __builtin_cexpi
2598 on a target without full C99 function support. */
2599 if (fn == NULL_TREE)
2601 tree fntype;
2602 const char *name = NULL;
2604 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2605 name = "cexpf";
2606 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2607 name = "cexp";
2608 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2609 name = "cexpl";
2611 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2612 fn = build_fn_decl (name, fntype);
2615 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2616 build_real (type, dconst0), arg);
2618 /* Make sure not to fold the cexp call again. */
2619 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2620 return expand_expr (build_call_nary (ctype, call, 1, narg),
2621 target, VOIDmode, EXPAND_NORMAL);
2624 /* Now build the proper return type. */
2625 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2626 make_tree (TREE_TYPE (arg), op2),
2627 make_tree (TREE_TYPE (arg), op1)),
2628 target, VOIDmode, EXPAND_NORMAL);
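/* In short, __builtin_cexpi (x), i.e. cos (x) + i*sin (x), is expanded
   one of three ways: through the sincos optab, through a call to sincos
   with two stack temporaries when the target C library provides it, or
   as cexp (0.0 + x*i) as the portable fallback; in the first two cases
   the real and imaginary parts are then reassembled into a
   COMPLEX_EXPR.  */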
2631 /* Conveniently construct a function call expression. FNDECL names the
2632 function to be called, N is the number of arguments, and the "..."
2633 parameters are the argument expressions. Unlike build_call_expr
2634 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2636 static tree
2637 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2639 va_list ap;
2640 tree fntype = TREE_TYPE (fndecl);
2641 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2643 va_start (ap, n);
2644 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2645 va_end (ap);
2646 SET_EXPR_LOCATION (fn, loc);
2647 return fn;
2650 /* Expand a call to one of the builtin rounding functions gcc defines
2651 as an extension (lfloor and lceil). As these are gcc extensions we
2652 do not need to worry about setting errno to EDOM.
2653 If expanding via optab fails, lower expression to (int)(floor(x)).
2654 EXP is the expression that is a call to the builtin function;
2655 if convenient, the result should be placed in TARGET. */
2657 static rtx
2658 expand_builtin_int_roundingfn (tree exp, rtx target)
2660 convert_optab builtin_optab;
2661 rtx op0, insns, tmp;
2662 tree fndecl = get_callee_fndecl (exp);
2663 enum built_in_function fallback_fn;
2664 tree fallback_fndecl;
2665 enum machine_mode mode;
2666 tree arg;
2668 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2669 gcc_unreachable ();
2671 arg = CALL_EXPR_ARG (exp, 0);
2673 switch (DECL_FUNCTION_CODE (fndecl))
2675 CASE_FLT_FN (BUILT_IN_LCEIL):
2676 CASE_FLT_FN (BUILT_IN_LLCEIL):
2677 builtin_optab = lceil_optab;
2678 fallback_fn = BUILT_IN_CEIL;
2679 break;
2681 CASE_FLT_FN (BUILT_IN_LFLOOR):
2682 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2683 builtin_optab = lfloor_optab;
2684 fallback_fn = BUILT_IN_FLOOR;
2685 break;
2687 default:
2688 gcc_unreachable ();
2691 /* Make a suitable register to place result in. */
2692 mode = TYPE_MODE (TREE_TYPE (exp));
2694 target = gen_reg_rtx (mode);
2696 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2697 need to expand the argument again. This way, we will not perform
2698 side-effects more than once. */
2699 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2701 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2703 start_sequence ();
2705 /* Compute into TARGET. */
2706 if (expand_sfix_optab (target, op0, builtin_optab))
2708 /* Output the entire sequence. */
2709 insns = get_insns ();
2710 end_sequence ();
2711 emit_insn (insns);
2712 return target;
2715 /* If we were unable to expand via the builtin, stop the sequence
2716 (without outputting the insns). */
2717 end_sequence ();
2719 /* Fall back to floating point rounding optab. */
2720 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2722 /* For non-C99 targets we may end up without a fallback fndecl here
2723 if the user called __builtin_lfloor directly. In this case emit
2724 a call to the floor/ceil variants nevertheless. This should result
2725 in the best user experience for targets without full C99 support. */
2726 if (fallback_fndecl == NULL_TREE)
2728 tree fntype;
2729 const char *name = NULL;
2731 switch (DECL_FUNCTION_CODE (fndecl))
2733 case BUILT_IN_LCEIL:
2734 case BUILT_IN_LLCEIL:
2735 name = "ceil";
2736 break;
2737 case BUILT_IN_LCEILF:
2738 case BUILT_IN_LLCEILF:
2739 name = "ceilf";
2740 break;
2741 case BUILT_IN_LCEILL:
2742 case BUILT_IN_LLCEILL:
2743 name = "ceill";
2744 break;
2745 case BUILT_IN_LFLOOR:
2746 case BUILT_IN_LLFLOOR:
2747 name = "floor";
2748 break;
2749 case BUILT_IN_LFLOORF:
2750 case BUILT_IN_LLFLOORF:
2751 name = "floorf";
2752 break;
2753 case BUILT_IN_LFLOORL:
2754 case BUILT_IN_LLFLOORL:
2755 name = "floorl";
2756 break;
2757 default:
2758 gcc_unreachable ();
2761 fntype = build_function_type_list (TREE_TYPE (arg),
2762 TREE_TYPE (arg), NULL_TREE);
2763 fallback_fndecl = build_fn_decl (name, fntype);
2766 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2768 tmp = expand_normal (exp);
2770 /* Truncate the result of floating point optab to integer
2771 via expand_fix (). */
2772 target = gen_reg_rtx (mode);
2773 expand_fix (target, tmp, 0);
2775 return target;
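/* For example, on a target without an lfloor pattern

     long l = __builtin_lfloor (x);

   is lowered to roughly (long) floor (x): the floor decl found above (or
   a bare "floor" decl built on the spot for non-C99 runtimes) is called
   and its result is converted to the integer mode with expand_fix.  */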
2778 /* Expand a call to one of the builtin math functions doing integer
2779 conversion (lrint).
2780 Return 0 if a normal call should be emitted rather than expanding the
2781 function in-line. EXP is the expression that is a call to the builtin
2782 function; if convenient, the result should be placed in TARGET. */
2784 static rtx
2785 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2787 convert_optab builtin_optab;
2788 rtx op0, insns;
2789 tree fndecl = get_callee_fndecl (exp);
2790 tree arg;
2791 enum machine_mode mode;
2793 /* There's no easy way to detect the case we need to set EDOM. */
2794 if (flag_errno_math)
2795 return NULL_RTX;
2797 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2798 gcc_unreachable ();
2800 arg = CALL_EXPR_ARG (exp, 0);
2802 switch (DECL_FUNCTION_CODE (fndecl))
2804 CASE_FLT_FN (BUILT_IN_LRINT):
2805 CASE_FLT_FN (BUILT_IN_LLRINT):
2806 builtin_optab = lrint_optab; break;
2807 CASE_FLT_FN (BUILT_IN_LROUND):
2808 CASE_FLT_FN (BUILT_IN_LLROUND):
2809 builtin_optab = lround_optab; break;
2810 default:
2811 gcc_unreachable ();
2814 /* Make a suitable register to place result in. */
2815 mode = TYPE_MODE (TREE_TYPE (exp));
2817 target = gen_reg_rtx (mode);
2819 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2820 need to expand the argument again. This way, we will not perform
2821 side-effects more than once. */
2822 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2824 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2826 start_sequence ();
2828 if (expand_sfix_optab (target, op0, builtin_optab))
2830 /* Output the entire sequence. */
2831 insns = get_insns ();
2832 end_sequence ();
2833 emit_insn (insns);
2834 return target;
2837 /* If we were unable to expand via the builtin, stop the sequence
2838 (without outputting the insns) and call the library function
2839 with the stabilized argument list. */
2840 end_sequence ();
2842 target = expand_call (exp, target, target == const0_rtx);
2844 return target;
2847 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2848 a normal call should be emitted rather than expanding the function
2849 in-line. EXP is the expression that is a call to the builtin
2850 function; if convenient, the result should be placed in TARGET. */
2852 static rtx
2853 expand_builtin_powi (tree exp, rtx target)
2855 tree arg0, arg1;
2856 rtx op0, op1;
2857 enum machine_mode mode;
2858 enum machine_mode mode2;
2860 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2861 return NULL_RTX;
2863 arg0 = CALL_EXPR_ARG (exp, 0);
2864 arg1 = CALL_EXPR_ARG (exp, 1);
2865 mode = TYPE_MODE (TREE_TYPE (exp));
2867 /* Emit a libcall to libgcc. */
2869 /* Mode of the 2nd argument must match that of an int. */
2870 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2872 if (target == NULL_RTX)
2873 target = gen_reg_rtx (mode);
2875 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2876 if (GET_MODE (op0) != mode)
2877 op0 = convert_to_mode (mode, op0, 0);
2878 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2879 if (GET_MODE (op1) != mode2)
2880 op1 = convert_to_mode (mode2, op1, 0);
2882 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2883 target, LCT_CONST, mode, 2,
2884 op0, mode, op1, mode2);
2886 return target;
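/* A sketch of the result: __builtin_powi (x, 3) on a double becomes a
   call to the powi libfunc for DFmode (conventionally __powidf2 in
   libgcc), passing the value in DFmode and the exponent in the mode of
   int; no open-coded expansion is attempted here.  */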
2889 /* Expand expression EXP, which is a call to the strlen builtin. Return
2890 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2891 try to get the result in TARGET, if convenient. */
2893 static rtx
2894 expand_builtin_strlen (tree exp, rtx target,
2895 enum machine_mode target_mode)
2897 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2898 return NULL_RTX;
2899 else
2901 struct expand_operand ops[4];
2902 rtx pat;
2903 tree len;
2904 tree src = CALL_EXPR_ARG (exp, 0);
2905 rtx src_reg, before_strlen;
2906 enum machine_mode insn_mode = target_mode;
2907 enum insn_code icode = CODE_FOR_nothing;
2908 unsigned int align;
2910 /* If the length can be computed at compile-time, return it. */
2911 len = c_strlen (src, 0);
2912 if (len)
2913 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2915 /* If the length can be computed at compile-time and is constant
2916 integer, but there are side-effects in src, evaluate
2917 src for side-effects, then return len.
2918 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2919 can be optimized into: i++; x = 3; */
2920 len = c_strlen (src, 1);
2921 if (len && TREE_CODE (len) == INTEGER_CST)
2923 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2924 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2927 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2929 /* If SRC is not a pointer type, don't do this operation inline. */
2930 if (align == 0)
2931 return NULL_RTX;
2933 /* Bail out if we can't compute strlen in the right mode. */
2934 while (insn_mode != VOIDmode)
2936 icode = optab_handler (strlen_optab, insn_mode);
2937 if (icode != CODE_FOR_nothing)
2938 break;
2940 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2942 if (insn_mode == VOIDmode)
2943 return NULL_RTX;
2945 /* Make a place to hold the source address. We will not expand
2946 the actual source until we are sure that the expansion will
2947 not fail -- there are trees that cannot be expanded twice. */
2948 src_reg = gen_reg_rtx (Pmode);
2950 /* Mark the beginning of the strlen sequence so we can emit the
2951 source operand later. */
2952 before_strlen = get_last_insn ();
2954 create_output_operand (&ops[0], target, insn_mode);
2955 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2956 create_integer_operand (&ops[2], 0);
2957 create_integer_operand (&ops[3], align);
2958 if (!maybe_expand_insn (icode, 4, ops))
2959 return NULL_RTX;
2961 /* Now that we are assured of success, expand the source. */
2962 start_sequence ();
2963 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2964 if (pat != src_reg)
2966 #ifdef POINTERS_EXTEND_UNSIGNED
2967 if (GET_MODE (pat) != Pmode)
2968 pat = convert_to_mode (Pmode, pat,
2969 POINTERS_EXTEND_UNSIGNED);
2970 #endif
2971 emit_move_insn (src_reg, pat);
2973 pat = get_insns ();
2974 end_sequence ();
2976 if (before_strlen)
2977 emit_insn_after (pat, before_strlen);
2978 else
2979 emit_insn_before (pat, get_insns ());
2981 /* Return the value in the proper mode for this function. */
2982 if (GET_MODE (ops[0].value) == target_mode)
2983 target = ops[0].value;
2984 else if (target != 0)
2985 convert_move (target, ops[0].value, 0);
2986 else
2987 target = convert_to_mode (target_mode, ops[0].value, 0);
2989 return target;
2993 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2994 bytes from constant string DATA + OFFSET and return it as target
2995 constant. */
2997 static rtx
2998 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2999 enum machine_mode mode)
3001 const char *str = (const char *) data;
3003 gcc_assert (offset >= 0
3004 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3005 <= strlen (str) + 1));
3007 return c_readstr (str + offset, mode);
3010 /* Expand a call EXP to the memcpy builtin.
3011 Return NULL_RTX if we failed, the caller should emit a normal call,
3012 otherwise try to get the result in TARGET, if convenient (and in
3013 mode MODE if that's convenient). */
3015 static rtx
3016 expand_builtin_memcpy (tree exp, rtx target)
3018 if (!validate_arglist (exp,
3019 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3020 return NULL_RTX;
3021 else
3023 tree dest = CALL_EXPR_ARG (exp, 0);
3024 tree src = CALL_EXPR_ARG (exp, 1);
3025 tree len = CALL_EXPR_ARG (exp, 2);
3026 const char *src_str;
3027 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3028 unsigned int dest_align
3029 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3030 rtx dest_mem, src_mem, dest_addr, len_rtx;
3031 HOST_WIDE_INT expected_size = -1;
3032 unsigned int expected_align = 0;
3034 /* If DEST is not a pointer type, call the normal function. */
3035 if (dest_align == 0)
3036 return NULL_RTX;
3038 /* If SRC is not a pointer type, don't do this
3039 operation in-line. */
3040 if (src_align == 0)
3041 return NULL_RTX;
3043 if (currently_expanding_gimple_stmt)
3044 stringop_block_profile (currently_expanding_gimple_stmt,
3045 &expected_align, &expected_size);
3047 if (expected_align < dest_align)
3048 expected_align = dest_align;
3049 dest_mem = get_memory_rtx (dest, len);
3050 set_mem_align (dest_mem, dest_align);
3051 len_rtx = expand_normal (len);
3052 src_str = c_getstr (src);
3054 /* If SRC is a string constant and block move would be done
3055 by pieces, we can avoid loading the string from memory
3056 and only store the computed constants. */
3057 if (src_str
3058 && CONST_INT_P (len_rtx)
3059 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3060 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3061 CONST_CAST (char *, src_str),
3062 dest_align, false))
3064 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3065 builtin_memcpy_read_str,
3066 CONST_CAST (char *, src_str),
3067 dest_align, false, 0);
3068 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3069 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3070 return dest_mem;
3073 src_mem = get_memory_rtx (src, len);
3074 set_mem_align (src_mem, src_align);
3076 /* Copy word part most expediently. */
3077 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3078 CALL_EXPR_TAILCALL (exp)
3079 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3080 expected_align, expected_size);
3082 if (dest_addr == 0)
3084 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3085 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3087 return dest_addr;
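/* For instance, with a constant length and a string source, as in

     memcpy (buf, "abcd", 5);

   for some sufficiently large destination BUF, the branch above can use
   store_by_pieces and emit a few immediate stores instead of referencing
   the string constant in memory; otherwise the copy goes through
   emit_block_move_hints, which may use a target block-move pattern or a
   library call.  */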
3091 /* Expand a call EXP to the mempcpy builtin.
3092 Return NULL_RTX if we failed; the caller should emit a normal call,
3093 otherwise try to get the result in TARGET, if convenient (and in
3094 mode MODE if that's convenient). If ENDP is 0 return the
3095 destination pointer, if ENDP is 1 return the end pointer ala
3096 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3097 stpcpy. */
3099 static rtx
3100 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3102 if (!validate_arglist (exp,
3103 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3104 return NULL_RTX;
3105 else
3107 tree dest = CALL_EXPR_ARG (exp, 0);
3108 tree src = CALL_EXPR_ARG (exp, 1);
3109 tree len = CALL_EXPR_ARG (exp, 2);
3110 return expand_builtin_mempcpy_args (dest, src, len,
3111 target, mode, /*endp=*/ 1);
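/* In user-level terms the ENDP encoding used by these expanders is:

     strcpy (d, s)      returns D                 (ENDP == 0)
     mempcpy (d, s, n)  returns D + N             (ENDP == 1)
     stpcpy (d, s)      returns D + strlen (S)    (ENDP == 2)

   where the ENDP == 2 result is one byte before the end of the
   strlen (S) + 1 bytes copied, i.e. the address of the copied NUL.  */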
3115 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3116 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3117 so that this can also be called without constructing an actual CALL_EXPR.
3118 The other arguments and return value are the same as for
3119 expand_builtin_mempcpy. */
3121 static rtx
3122 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3123 rtx target, enum machine_mode mode, int endp)
3125 /* If return value is ignored, transform mempcpy into memcpy. */
3126 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3128 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3129 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3130 dest, src, len);
3131 return expand_expr (result, target, mode, EXPAND_NORMAL);
3133 else
3135 const char *src_str;
3136 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3137 unsigned int dest_align
3138 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3139 rtx dest_mem, src_mem, len_rtx;
3141 /* If either SRC or DEST is not a pointer type, don't do this
3142 operation in-line. */
3143 if (dest_align == 0 || src_align == 0)
3144 return NULL_RTX;
3146 /* If LEN is not constant, call the normal function. */
3147 if (! host_integerp (len, 1))
3148 return NULL_RTX;
3150 len_rtx = expand_normal (len);
3151 src_str = c_getstr (src);
3153 /* If SRC is a string constant and block move would be done
3154 by pieces, we can avoid loading the string from memory
3155 and only store the computed constants. */
3156 if (src_str
3157 && CONST_INT_P (len_rtx)
3158 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3159 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3160 CONST_CAST (char *, src_str),
3161 dest_align, false))
3163 dest_mem = get_memory_rtx (dest, len);
3164 set_mem_align (dest_mem, dest_align);
3165 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3166 builtin_memcpy_read_str,
3167 CONST_CAST (char *, src_str),
3168 dest_align, false, endp);
3169 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3170 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3171 return dest_mem;
3174 if (CONST_INT_P (len_rtx)
3175 && can_move_by_pieces (INTVAL (len_rtx),
3176 MIN (dest_align, src_align)))
3178 dest_mem = get_memory_rtx (dest, len);
3179 set_mem_align (dest_mem, dest_align);
3180 src_mem = get_memory_rtx (src, len);
3181 set_mem_align (src_mem, src_align);
3182 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3183 MIN (dest_align, src_align), endp);
3184 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3185 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3186 return dest_mem;
3189 return NULL_RTX;
3193 #ifndef HAVE_movstr
3194 # define HAVE_movstr 0
3195 # define CODE_FOR_movstr CODE_FOR_nothing
3196 #endif
3198 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3199 we failed; the caller should emit a normal call, otherwise try to
3200 get the result in TARGET, if convenient. If ENDP is 0 return the
3201 destination pointer, if ENDP is 1 return the end pointer ala
3202 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3203 stpcpy. */
3205 static rtx
3206 expand_movstr (tree dest, tree src, rtx target, int endp)
3208 struct expand_operand ops[3];
3209 rtx dest_mem;
3210 rtx src_mem;
3212 if (!HAVE_movstr)
3213 return NULL_RTX;
3215 dest_mem = get_memory_rtx (dest, NULL);
3216 src_mem = get_memory_rtx (src, NULL);
3217 if (!endp)
3219 target = force_reg (Pmode, XEXP (dest_mem, 0));
3220 dest_mem = replace_equiv_address (dest_mem, target);
3223 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3224 create_fixed_operand (&ops[1], dest_mem);
3225 create_fixed_operand (&ops[2], src_mem);
3226 expand_insn (CODE_FOR_movstr, 3, ops);
3228 if (endp && target != const0_rtx)
3230 target = ops[0].value;
3231 /* movstr is supposed to set end to the address of the NUL
3232 terminator. If the caller requested a mempcpy-like return value,
3233 adjust it. */
3234 if (endp == 1)
3236 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), target), 1);
3237 emit_move_insn (target, force_operand (tem, NULL_RTX));
3240 return target;
3243 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3244 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3245 try to get the result in TARGET, if convenient (and in mode MODE if that's
3246 convenient). */
3248 static rtx
3249 expand_builtin_strcpy (tree exp, rtx target)
3251 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3253 tree dest = CALL_EXPR_ARG (exp, 0);
3254 tree src = CALL_EXPR_ARG (exp, 1);
3255 return expand_builtin_strcpy_args (dest, src, target);
3257 return NULL_RTX;
3260 /* Helper function to do the actual work for expand_builtin_strcpy. The
3261 arguments to the builtin_strcpy call DEST and SRC are broken out
3262 so that this can also be called without constructing an actual CALL_EXPR.
3263 The other arguments and return value are the same as for
3264 expand_builtin_strcpy. */
3266 static rtx
3267 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3269 return expand_movstr (dest, src, target, /*endp=*/0);
3272 /* Expand a call EXP to the stpcpy builtin.
3273 Return NULL_RTX if we failed; the caller should emit a normal call,
3274 otherwise try to get the result in TARGET, if convenient (and in
3275 mode MODE if that's convenient). */
3277 static rtx
3278 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3280 tree dst, src;
3281 location_t loc = EXPR_LOCATION (exp);
3283 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3284 return NULL_RTX;
3286 dst = CALL_EXPR_ARG (exp, 0);
3287 src = CALL_EXPR_ARG (exp, 1);
3289 /* If return value is ignored, transform stpcpy into strcpy. */
3290 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3292 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3293 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3294 return expand_expr (result, target, mode, EXPAND_NORMAL);
3296 else
3298 tree len, lenp1;
3299 rtx ret;
3301 /* Ensure we get an actual string whose length can be evaluated at
3302 compile-time, not an expression containing a string. This is
3303 because the latter will potentially produce pessimized code
3304 when used to produce the return value. */
3305 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3306 return expand_movstr (dst, src, target, /*endp=*/2);
3308 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3309 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3310 target, mode, /*endp=*/2);
3312 if (ret)
3313 return ret;
3315 if (TREE_CODE (len) == INTEGER_CST)
3317 rtx len_rtx = expand_normal (len);
3319 if (CONST_INT_P (len_rtx))
3321 ret = expand_builtin_strcpy_args (dst, src, target);
3323 if (ret)
3325 if (! target)
3327 if (mode != VOIDmode)
3328 target = gen_reg_rtx (mode);
3329 else
3330 target = gen_reg_rtx (GET_MODE (ret));
3332 if (GET_MODE (target) != GET_MODE (ret))
3333 ret = gen_lowpart (GET_MODE (target), ret);
3335 ret = plus_constant (ret, INTVAL (len_rtx));
3336 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3337 gcc_assert (ret);
3339 return target;
3344 return expand_movstr (dst, src, target, /*endp=*/2);
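/* So, when the source is a known string constant, e.g.

     char *p = stpcpy (buf, "hi");

   the call is rewritten as a mempcpy of strlen ("hi") + 1 == 3 bytes and
   P ends up as BUF + 2, pointing at the copied NUL; when the length is
   not known at compile time we fall back to the movstr expansion with
   ENDP == 2.  */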
3348 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3349 bytes from constant string DATA + OFFSET and return it as target
3350 constant. */
3352 static rtx
3353 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3354 enum machine_mode mode)
3356 const char *str = (const char *) data;
3358 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3359 return const0_rtx;
3361 return c_readstr (str + offset, mode);
3364 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3365 NULL_RTX if we failed; the caller should emit a normal call. */
3367 static rtx
3368 expand_builtin_strncpy (tree exp, rtx target)
3370 location_t loc = EXPR_LOCATION (exp);
3372 if (validate_arglist (exp,
3373 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3375 tree dest = CALL_EXPR_ARG (exp, 0);
3376 tree src = CALL_EXPR_ARG (exp, 1);
3377 tree len = CALL_EXPR_ARG (exp, 2);
3378 tree slen = c_strlen (src, 1);
3380 /* We must be passed a constant len and src parameter. */
3381 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3382 return NULL_RTX;
3384 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3386 /* We're required to pad with trailing zeros if the requested
3387 len is greater than strlen(s2)+1. In that case try to
3388 use store_by_pieces; if it fails, punt. */
3389 if (tree_int_cst_lt (slen, len))
3391 unsigned int dest_align
3392 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3393 const char *p = c_getstr (src);
3394 rtx dest_mem;
3396 if (!p || dest_align == 0 || !host_integerp (len, 1)
3397 || !can_store_by_pieces (tree_low_cst (len, 1),
3398 builtin_strncpy_read_str,
3399 CONST_CAST (char *, p),
3400 dest_align, false))
3401 return NULL_RTX;
3403 dest_mem = get_memory_rtx (dest, len);
3404 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3405 builtin_strncpy_read_str,
3406 CONST_CAST (char *, p), dest_align, false, 0);
3407 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3408 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3409 return dest_mem;
3412 return NULL_RTX;
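/* For example, with both arguments constant,

     strncpy (buf, "hi", 8);

   must store 'h', 'i' and then six NUL padding bytes; the
   store_by_pieces path above does exactly that, because
   builtin_strncpy_read_str returns const0_rtx for any offset past the
   end of the source string.  */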
3415 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3416 bytes from constant string DATA + OFFSET and return it as target
3417 constant. */
3419 static rtx
3420 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3421 enum machine_mode mode)
3423 const char *c = (const char *) data;
3424 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3426 memset (p, *c, GET_MODE_SIZE (mode));
3428 return c_readstr (p, mode);
3431 /* Callback routine for store_by_pieces. Return the RTL of a register
3432 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3433 char value given in the RTL register data. For example, if mode is
3434 4 bytes wide, return the RTL for 0x01010101*data. */
3436 static rtx
3437 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3438 enum machine_mode mode)
3440 rtx target, coeff;
3441 size_t size;
3442 char *p;
3444 size = GET_MODE_SIZE (mode);
3445 if (size == 1)
3446 return (rtx) data;
3448 p = XALLOCAVEC (char, size);
3449 memset (p, 1, size);
3450 coeff = c_readstr (p, mode);
3452 target = convert_to_mode (mode, (rtx) data, 1);
3453 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3454 return force_reg (mode, target);
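/* A worked instance of the multiplication trick above: for a 4-byte mode
   the coefficient read back by c_readstr is 0x01010101, so a runtime
   byte value of 0xAB becomes

     0xAB * 0x01010101 == 0xABABABAB,

   i.e. the byte replicated across the whole word, which is what the
   wider stores issued by store_by_pieces need.  */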
3457 /* Expand expression EXP, which is a call to the memset builtin. Return
3458 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3459 try to get the result in TARGET, if convenient (and in mode MODE if that's
3460 convenient). */
3462 static rtx
3463 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3465 if (!validate_arglist (exp,
3466 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3467 return NULL_RTX;
3468 else
3470 tree dest = CALL_EXPR_ARG (exp, 0);
3471 tree val = CALL_EXPR_ARG (exp, 1);
3472 tree len = CALL_EXPR_ARG (exp, 2);
3473 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3477 /* Helper function to do the actual work for expand_builtin_memset. The
3478 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3479 so that this can also be called without constructing an actual CALL_EXPR.
3480 The other arguments and return value are the same as for
3481 expand_builtin_memset. */
3483 static rtx
3484 expand_builtin_memset_args (tree dest, tree val, tree len,
3485 rtx target, enum machine_mode mode, tree orig_exp)
3487 tree fndecl, fn;
3488 enum built_in_function fcode;
3489 enum machine_mode val_mode;
3490 char c;
3491 unsigned int dest_align;
3492 rtx dest_mem, dest_addr, len_rtx;
3493 HOST_WIDE_INT expected_size = -1;
3494 unsigned int expected_align = 0;
3496 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3498 /* If DEST is not a pointer type, don't do this operation in-line. */
3499 if (dest_align == 0)
3500 return NULL_RTX;
3502 if (currently_expanding_gimple_stmt)
3503 stringop_block_profile (currently_expanding_gimple_stmt,
3504 &expected_align, &expected_size);
3506 if (expected_align < dest_align)
3507 expected_align = dest_align;
3509 /* If the LEN parameter is zero, return DEST. */
3510 if (integer_zerop (len))
3512 /* Evaluate and ignore VAL in case it has side-effects. */
3513 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3514 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3517 /* Stabilize the arguments in case we fail. */
3518 dest = builtin_save_expr (dest);
3519 val = builtin_save_expr (val);
3520 len = builtin_save_expr (len);
3522 len_rtx = expand_normal (len);
3523 dest_mem = get_memory_rtx (dest, len);
3524 val_mode = TYPE_MODE (unsigned_char_type_node);
3526 if (TREE_CODE (val) != INTEGER_CST)
3528 rtx val_rtx;
3530 val_rtx = expand_normal (val);
3531 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3533 /* Assume that we can memset by pieces if we can store
3534 the coefficients by pieces (in the required modes).
3535 We can't pass builtin_memset_gen_str as that emits RTL. */
3536 c = 1;
3537 if (host_integerp (len, 1)
3538 && can_store_by_pieces (tree_low_cst (len, 1),
3539 builtin_memset_read_str, &c, dest_align,
3540 true))
3542 val_rtx = force_reg (val_mode, val_rtx);
3543 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3544 builtin_memset_gen_str, val_rtx, dest_align,
3545 true, 0);
3547 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3548 dest_align, expected_align,
3549 expected_size))
3550 goto do_libcall;
3552 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3553 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3554 return dest_mem;
3557 if (target_char_cast (val, &c))
3558 goto do_libcall;
3560 if (c)
3562 if (host_integerp (len, 1)
3563 && can_store_by_pieces (tree_low_cst (len, 1),
3564 builtin_memset_read_str, &c, dest_align,
3565 true))
3566 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3567 builtin_memset_read_str, &c, dest_align, true, 0);
3568 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3569 gen_int_mode (c, val_mode),
3570 dest_align, expected_align,
3571 expected_size))
3572 goto do_libcall;
3574 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3575 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3576 return dest_mem;
3579 set_mem_align (dest_mem, dest_align);
3580 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3581 CALL_EXPR_TAILCALL (orig_exp)
3582 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3583 expected_align, expected_size);
3585 if (dest_addr == 0)
3587 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3588 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3591 return dest_addr;
3593 do_libcall:
3594 fndecl = get_callee_fndecl (orig_exp);
3595 fcode = DECL_FUNCTION_CODE (fndecl);
3596 if (fcode == BUILT_IN_MEMSET)
3597 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3598 dest, val, len);
3599 else if (fcode == BUILT_IN_BZERO)
3600 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3601 dest, len);
3602 else
3603 gcc_unreachable ();
3604 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3605 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3606 return expand_call (fn, target, target == const0_rtx);
3609 /* Expand expression EXP, which is a call to the bzero builtin. Return
3610 NULL_RTX if we failed; the caller should emit a normal call. */
3612 static rtx
3613 expand_builtin_bzero (tree exp)
3615 tree dest, size;
3616 location_t loc = EXPR_LOCATION (exp);
3618 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
3621 dest = CALL_EXPR_ARG (exp, 0);
3622 size = CALL_EXPR_ARG (exp, 1);
3624 /* New argument list transforming bzero(ptr x, int y) to
3625 memset(ptr x, int 0, size_t y). This is done this way
3626 so that if it isn't expanded inline, we fall back to
3627 calling bzero instead of memset. */
3629 return expand_builtin_memset_args (dest, integer_zero_node,
3630 fold_convert_loc (loc, sizetype, size),
3631 const0_rtx, VOIDmode, exp);
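/* Illustrative sketch, not part of GCC: the source-level equivalence the
   argument rewrite above relies on; bzero (p, n) clears N bytes exactly
   as memset (p, 0, n) does.  The name ref_bzero is made up for this
   example.  */
static void
ref_bzero (void *p, size_t n)
{
  memset (p, 0, n);
}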
3634 /* Expand expression EXP, which is a call to the memcmp built-in function.
3635 Return NULL_RTX if we failed and the
3636 caller should emit a normal call, otherwise try to get the result in
3637 TARGET, if convenient (and in mode MODE, if that's convenient). */
3639 static rtx
3640 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3641 ATTRIBUTE_UNUSED enum machine_mode mode)
3643 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3645 if (!validate_arglist (exp,
3646 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3647 return NULL_RTX;
3649 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3651 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3652 rtx result;
3653 rtx insn;
3654 tree arg1 = CALL_EXPR_ARG (exp, 0);
3655 tree arg2 = CALL_EXPR_ARG (exp, 1);
3656 tree len = CALL_EXPR_ARG (exp, 2);
3658 unsigned int arg1_align
3659 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3660 unsigned int arg2_align
3661 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3662 enum machine_mode insn_mode;
3664 #ifdef HAVE_cmpmemsi
3665 if (HAVE_cmpmemsi)
3666 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3667 else
3668 #endif
3669 #ifdef HAVE_cmpstrnsi
3670 if (HAVE_cmpstrnsi)
3671 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3672 else
3673 #endif
3674 return NULL_RTX;
3676 /* If we don't have POINTER_TYPE, call the function. */
3677 if (arg1_align == 0 || arg2_align == 0)
3678 return NULL_RTX;
3680 /* Make a place to write the result of the instruction. */
3681 result = target;
3682 if (! (result != 0
3683 && REG_P (result) && GET_MODE (result) == insn_mode
3684 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3685 result = gen_reg_rtx (insn_mode);
3687 arg1_rtx = get_memory_rtx (arg1, len);
3688 arg2_rtx = get_memory_rtx (arg2, len);
3689 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3691 /* Set MEM_SIZE as appropriate. */
3692 if (CONST_INT_P (arg3_rtx))
3694 set_mem_size (arg1_rtx, arg3_rtx);
3695 set_mem_size (arg2_rtx, arg3_rtx);
3698 #ifdef HAVE_cmpmemsi
3699 if (HAVE_cmpmemsi)
3700 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3701 GEN_INT (MIN (arg1_align, arg2_align)));
3702 else
3703 #endif
3704 #ifdef HAVE_cmpstrnsi
3705 if (HAVE_cmpstrnsi)
3706 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3707 GEN_INT (MIN (arg1_align, arg2_align)));
3708 else
3709 #endif
3710 gcc_unreachable ();
3712 if (insn)
3713 emit_insn (insn);
3714 else
3715 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3716 TYPE_MODE (integer_type_node), 3,
3717 XEXP (arg1_rtx, 0), Pmode,
3718 XEXP (arg2_rtx, 0), Pmode,
3719 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3720 TYPE_UNSIGNED (sizetype)),
3721 TYPE_MODE (sizetype));
3723 /* Return the value in the proper mode for this function. */
3724 mode = TYPE_MODE (TREE_TYPE (exp));
3725 if (GET_MODE (result) == mode)
3726 return result;
3727 else if (target != 0)
3729 convert_move (target, result, 0);
3730 return target;
3732 else
3733 return convert_to_mode (mode, result, 0);
3735 #endif
3737 return NULL_RTX;
3740 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3741 if we failed and the caller should emit a normal call; otherwise try to get
3742 the result in TARGET, if convenient. */
3744 static rtx
3745 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3747 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3748 return NULL_RTX;
3750 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3751 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3752 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3754 rtx arg1_rtx, arg2_rtx;
3755 rtx result, insn = NULL_RTX;
3756 tree fndecl, fn;
3757 tree arg1 = CALL_EXPR_ARG (exp, 0);
3758 tree arg2 = CALL_EXPR_ARG (exp, 1);
3760 unsigned int arg1_align
3761 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3762 unsigned int arg2_align
3763 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3765 /* If we don't have POINTER_TYPE, call the function. */
3766 if (arg1_align == 0 || arg2_align == 0)
3767 return NULL_RTX;
3769 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3770 arg1 = builtin_save_expr (arg1);
3771 arg2 = builtin_save_expr (arg2);
3773 arg1_rtx = get_memory_rtx (arg1, NULL);
3774 arg2_rtx = get_memory_rtx (arg2, NULL);
3776 #ifdef HAVE_cmpstrsi
3777 /* Try to call cmpstrsi. */
3778 if (HAVE_cmpstrsi)
3780 enum machine_mode insn_mode
3781 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3783 /* Make a place to write the result of the instruction. */
3784 result = target;
3785 if (! (result != 0
3786 && REG_P (result) && GET_MODE (result) == insn_mode
3787 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3788 result = gen_reg_rtx (insn_mode);
3790 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3791 GEN_INT (MIN (arg1_align, arg2_align)));
3793 #endif
3794 #ifdef HAVE_cmpstrnsi
3795 /* Try to determine at least one length and call cmpstrnsi. */
3796 if (!insn && HAVE_cmpstrnsi)
3798 tree len;
3799 rtx arg3_rtx;
3801 enum machine_mode insn_mode
3802 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3803 tree len1 = c_strlen (arg1, 1);
3804 tree len2 = c_strlen (arg2, 1);
3806 if (len1)
3807 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3808 if (len2)
3809 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3811 /* If we don't have a constant length for the first, use the length
3812 of the second, if we know it. We don't require a constant for
3813 this case; some cost analysis could be done if both are available
3814 but neither is constant. For now, assume they're equally cheap,
3815 unless one has side effects. If both strings have constant lengths,
3816 use the smaller. */
3818 if (!len1)
3819 len = len2;
3820 else if (!len2)
3821 len = len1;
3822 else if (TREE_SIDE_EFFECTS (len1))
3823 len = len2;
3824 else if (TREE_SIDE_EFFECTS (len2))
3825 len = len1;
3826 else if (TREE_CODE (len1) != INTEGER_CST)
3827 len = len2;
3828 else if (TREE_CODE (len2) != INTEGER_CST)
3829 len = len1;
3830 else if (tree_int_cst_lt (len1, len2))
3831 len = len1;
3832 else
3833 len = len2;
3835 /* If both arguments have side effects, we cannot optimize. */
3836 if (!len || TREE_SIDE_EFFECTS (len))
3837 goto do_libcall;
3839 arg3_rtx = expand_normal (len);
3841 /* Make a place to write the result of the instruction. */
3842 result = target;
3843 if (! (result != 0
3844 && REG_P (result) && GET_MODE (result) == insn_mode
3845 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3846 result = gen_reg_rtx (insn_mode);
3848 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3849 GEN_INT (MIN (arg1_align, arg2_align)));
3851 #endif
3853 if (insn)
3855 enum machine_mode mode;
3856 emit_insn (insn);
3858 /* Return the value in the proper mode for this function. */
3859 mode = TYPE_MODE (TREE_TYPE (exp));
3860 if (GET_MODE (result) == mode)
3861 return result;
3862 if (target == 0)
3863 return convert_to_mode (mode, result, 0);
3864 convert_move (target, result, 0);
3865 return target;
3868 /* Expand the library call ourselves using a stabilized argument
3869 list to avoid re-evaluating the function's arguments twice. */
3870 #ifdef HAVE_cmpstrnsi
3871 do_libcall:
3872 #endif
3873 fndecl = get_callee_fndecl (exp);
3874 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3875 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3876 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3877 return expand_call (fn, target, target == const0_rtx);
3879 #endif
3880 return NULL_RTX;
3883 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3884 NULL_RTX if we failed and the caller should emit a normal call; otherwise try to get
3885 the result in TARGET, if convenient. */
3887 static rtx
3888 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3889 ATTRIBUTE_UNUSED enum machine_mode mode)
3891 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3893 if (!validate_arglist (exp,
3894 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3895 return NULL_RTX;
3897 /* If c_strlen can determine an expression for one of the string
3898 lengths, and it doesn't have side effects, then emit cmpstrnsi
3899 using length MIN(strlen(string)+1, arg3). */
3900 #ifdef HAVE_cmpstrnsi
3901 if (HAVE_cmpstrnsi)
3903 tree len, len1, len2;
3904 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3905 rtx result, insn;
3906 tree fndecl, fn;
3907 tree arg1 = CALL_EXPR_ARG (exp, 0);
3908 tree arg2 = CALL_EXPR_ARG (exp, 1);
3909 tree arg3 = CALL_EXPR_ARG (exp, 2);
3911 unsigned int arg1_align
3912 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3913 unsigned int arg2_align
3914 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3915 enum machine_mode insn_mode
3916 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3918 len1 = c_strlen (arg1, 1);
3919 len2 = c_strlen (arg2, 1);
3921 if (len1)
3922 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3923 if (len2)
3924 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3926 /* If we don't have a constant length for the first, use the length
3927 of the second, if we know it. We don't require a constant for
3928 this case; some cost analysis could be done if both are available
3929 but neither is constant. For now, assume they're equally cheap,
3930 unless one has side effects. If both strings have constant lengths,
3931 use the smaller. */
3933 if (!len1)
3934 len = len2;
3935 else if (!len2)
3936 len = len1;
3937 else if (TREE_SIDE_EFFECTS (len1))
3938 len = len2;
3939 else if (TREE_SIDE_EFFECTS (len2))
3940 len = len1;
3941 else if (TREE_CODE (len1) != INTEGER_CST)
3942 len = len2;
3943 else if (TREE_CODE (len2) != INTEGER_CST)
3944 len = len1;
3945 else if (tree_int_cst_lt (len1, len2))
3946 len = len1;
3947 else
3948 len = len2;
3950 /* If both arguments have side effects, we cannot optimize. */
3951 if (!len || TREE_SIDE_EFFECTS (len))
3952 return NULL_RTX;
3954 /* The actual new length parameter is MIN(len,arg3). */
3955 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3956 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3958 /* If we don't have POINTER_TYPE, call the function. */
3959 if (arg1_align == 0 || arg2_align == 0)
3960 return NULL_RTX;
3962 /* Make a place to write the result of the instruction. */
3963 result = target;
3964 if (! (result != 0
3965 && REG_P (result) && GET_MODE (result) == insn_mode
3966 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3967 result = gen_reg_rtx (insn_mode);
3969 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3970 arg1 = builtin_save_expr (arg1);
3971 arg2 = builtin_save_expr (arg2);
3972 len = builtin_save_expr (len);
3974 arg1_rtx = get_memory_rtx (arg1, len);
3975 arg2_rtx = get_memory_rtx (arg2, len);
3976 arg3_rtx = expand_normal (len);
3977 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3978 GEN_INT (MIN (arg1_align, arg2_align)));
3979 if (insn)
3981 emit_insn (insn);
3983 /* Return the value in the proper mode for this function. */
3984 mode = TYPE_MODE (TREE_TYPE (exp));
3985 if (GET_MODE (result) == mode)
3986 return result;
3987 if (target == 0)
3988 return convert_to_mode (mode, result, 0);
3989 convert_move (target, result, 0);
3990 return target;
3993 /* Expand the library call ourselves using a stabilized argument
3994 list to avoid re-evaluating the function's arguments twice. */
3995 fndecl = get_callee_fndecl (exp);
3996 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3997 arg1, arg2, len);
3998 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3999 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4000 return expand_call (fn, target, target == const0_rtx);
4002 #endif
4003 return NULL_RTX;
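/* Illustrative sketch, not part of GCC: the length clamp used above.  If
   the length of one string argument is known at compile time, strncmp
   never inspects bytes past that string's terminating NUL, so the
   comparison length can safely be reduced to MIN (strlen (s) + 1, n).
   The name ref_strncmp_bound is made up for this example.  */
static size_t
ref_strncmp_bound (size_t known_strlen, size_t n)
{
  size_t bound = known_strlen + 1;
  return bound < n ? bound : n;
}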
4006 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4007 if that's convenient. */
4010 expand_builtin_saveregs (void)
4012 rtx val, seq;
4014 /* Don't do __builtin_saveregs more than once in a function.
4015 Save the result of the first call and reuse it. */
4016 if (saveregs_value != 0)
4017 return saveregs_value;
4019 /* When this function is called, it means that registers must be
4020 saved on entry to this function. So we migrate the call to the
4021 first insn of this function. */
4023 start_sequence ();
4025 /* Do whatever the machine needs done in this case. */
4026 val = targetm.calls.expand_builtin_saveregs ();
4028 seq = get_insns ();
4029 end_sequence ();
4031 saveregs_value = val;
4033 /* Put the insns after the NOTE that starts the function. If this
4034 is inside a start_sequence, make the outer-level insn chain current, so
4035 the code is placed at the start of the function. */
4036 push_topmost_sequence ();
4037 emit_insn_after (seq, entry_of_function ());
4038 pop_topmost_sequence ();
4040 return val;
4043 /* Expand a call to __builtin_next_arg. */
4045 static rtx
4046 expand_builtin_next_arg (void)
4048 /* Checking arguments is already done in fold_builtin_next_arg,
4049 which must be called before this function. */
4050 return expand_binop (ptr_mode, add_optab,
4051 crtl->args.internal_arg_pointer,
4052 crtl->args.arg_offset_rtx,
4053 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4056 /* Make it easier for the backends by protecting the valist argument
4057 from multiple evaluations. */
4059 static tree
4060 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4062 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4064 /* The current way of determining the type of valist is completely
4065 bogus. We should have the information on the va builtin instead. */
4066 if (!vatype)
4067 vatype = targetm.fn_abi_va_list (cfun->decl);
4069 if (TREE_CODE (vatype) == ARRAY_TYPE)
4071 if (TREE_SIDE_EFFECTS (valist))
4072 valist = save_expr (valist);
4074 /* For this case, the backends will be expecting a pointer to
4075 vatype, but it's possible we've actually been given an array
4076 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4077 So fix it. */
4078 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4080 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4081 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4084 else
4086 tree pt = build_pointer_type (vatype);
4088 if (! needs_lvalue)
4090 if (! TREE_SIDE_EFFECTS (valist))
4091 return valist;
4093 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4094 TREE_SIDE_EFFECTS (valist) = 1;
4097 if (TREE_SIDE_EFFECTS (valist))
4098 valist = save_expr (valist);
4099 valist = fold_build2_loc (loc, MEM_REF,
4100 vatype, valist, build_int_cst (pt, 0));
4103 return valist;
4106 /* The "standard" definition of va_list is void*. */
4108 tree
4109 std_build_builtin_va_list (void)
4111 return ptr_type_node;
4114 /* The "standard" abi va_list is va_list_type_node. */
4116 tree
4117 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4119 return va_list_type_node;
4122 /* The "standard" type of va_list is va_list_type_node. */
4124 tree
4125 std_canonical_va_list_type (tree type)
4127 tree wtype, htype;
4129 if (INDIRECT_REF_P (type))
4130 type = TREE_TYPE (type);
4131 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
4132 type = TREE_TYPE (type);
4133 wtype = va_list_type_node;
4134 htype = type;
4135 /* Treat structure va_list types. */
4136 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4137 htype = TREE_TYPE (htype);
4138 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4140 /* If va_list is an array type, the argument may have decayed
4141 to a pointer type, e.g. by being passed to another function.
4142 In that case, unwrap both types so that we can compare the
4143 underlying records. */
4144 if (TREE_CODE (htype) == ARRAY_TYPE
4145 || POINTER_TYPE_P (htype))
4147 wtype = TREE_TYPE (wtype);
4148 htype = TREE_TYPE (htype);
4151 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4152 return va_list_type_node;
4154 return NULL_TREE;
4157 /* The "standard" implementation of va_start: just assign `nextarg' to
4158 the variable. */
4160 void
4161 std_expand_builtin_va_start (tree valist, rtx nextarg)
4163 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4164 convert_move (va_r, nextarg, 0);
4167 /* Expand EXP, a call to __builtin_va_start. */
4169 static rtx
4170 expand_builtin_va_start (tree exp)
4172 rtx nextarg;
4173 tree valist;
4174 location_t loc = EXPR_LOCATION (exp);
4176 if (call_expr_nargs (exp) < 2)
4178 error_at (loc, "too few arguments to function %<va_start%>");
4179 return const0_rtx;
4182 if (fold_builtin_next_arg (exp, true))
4183 return const0_rtx;
4185 nextarg = expand_builtin_next_arg ();
4186 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4188 if (targetm.expand_builtin_va_start)
4189 targetm.expand_builtin_va_start (valist, nextarg);
4190 else
4191 std_expand_builtin_va_start (valist, nextarg);
4193 return const0_rtx;
4196 /* The "standard" implementation of va_arg: read the value from the
4197 current (padded) address and increment by the (padded) size. */
4199 tree
4200 std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
4201 gimple_seq *post_p)
4203 tree addr, t, type_size, rounded_size, valist_tmp;
4204 unsigned HOST_WIDE_INT align, boundary;
4205 bool indirect;
4207 #ifdef ARGS_GROW_DOWNWARD
4208 /* All of the alignment and movement below is for args-grow-up machines.
4209 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4210 implement their own specialized gimplify_va_arg_expr routines. */
4211 gcc_unreachable ();
4212 #endif
4214 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4215 if (indirect)
4216 type = build_pointer_type (type);
4218 align = PARM_BOUNDARY / BITS_PER_UNIT;
4219 boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);
4221 /* When we align a parameter on the stack for the caller, if the
4222 parameter alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it
4223 will be aligned at MAX_SUPPORTED_STACK_ALIGNMENT. Match the
4224 callee here with the caller. */
4225 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
4226 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
4228 boundary /= BITS_PER_UNIT;
4230 /* Hoist the valist value into a temporary for the moment. */
4231 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4233 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4234 requires greater alignment, we must perform dynamic alignment. */
4235 if (boundary > align
4236 && !integer_zerop (TYPE_SIZE (type)))
4238 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4239 fold_build2 (POINTER_PLUS_EXPR,
4240 TREE_TYPE (valist),
4241 valist_tmp, size_int (boundary - 1)));
4242 gimplify_and_add (t, pre_p);
4244 t = fold_convert (sizetype, valist_tmp);
4245 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4246 fold_convert (TREE_TYPE (valist),
4247 fold_build2 (BIT_AND_EXPR, sizetype, t,
4248 size_int (-boundary))));
4249 gimplify_and_add (t, pre_p);
4251 else
4252 boundary = align;
4254 /* If the actual alignment is less than the alignment of the type,
4255 adjust the type accordingly so that we don't assume strict alignment
4256 when dereferencing the pointer. */
4257 boundary *= BITS_PER_UNIT;
4258 if (boundary < TYPE_ALIGN (type))
4260 type = build_variant_type_copy (type);
4261 TYPE_ALIGN (type) = boundary;
4264 /* Compute the rounded size of the type. */
4265 type_size = size_in_bytes (type);
4266 rounded_size = round_up (type_size, align);
4268 /* Reduce rounded_size so it's sharable with the postqueue. */
4269 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4271 /* Get AP. */
4272 addr = valist_tmp;
4273 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4275 /* Small args are padded downward. */
4276 t = fold_build2_loc (input_location, GT_EXPR, sizetype,
4277 rounded_size, size_int (align));
4278 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4279 size_binop (MINUS_EXPR, rounded_size, type_size));
4280 addr = fold_build2 (POINTER_PLUS_EXPR,
4281 TREE_TYPE (addr), addr, t);
4284 /* Compute new value for AP. */
4285 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4286 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4287 gimplify_and_add (t, pre_p);
4289 addr = fold_convert (build_pointer_type (type), addr);
4291 if (indirect)
4292 addr = build_va_arg_indirect_ref (addr);
4294 return build_va_arg_indirect_ref (addr);
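/* Illustrative sketch, not part of GCC: the pointer arithmetic the
   gimplification above performs for an args-grow-upward target, written
   in plain C (uses uintptr_t from <stdint.h>).  AP is the va_list
   pointer, SIZE the argument size in bytes, ALIGN the minimum slot
   alignment in bytes and BOUNDARY the argument's required alignment in
   bytes (both powers of two).  The padding-downward adjustment for small
   arguments is omitted, and the name ref_va_arg_step is made up for this
   example.  */
static void *
ref_va_arg_step (char **ap, size_t size, size_t align, size_t boundary)
{
  uintptr_t p = (uintptr_t) *ap;
  size_t rounded;

  /* Dynamically realign if the argument needs more than the default
     slot alignment, mirroring the BIT_AND_EXPR sequence above.  */
  if (boundary > align)
    p = (p + boundary - 1) & ~((uintptr_t) boundary - 1);

  /* Round the size up to a whole number of slots and bump AP.  */
  rounded = (size + align - 1) & ~(align - 1);
  *ap = (char *) p + rounded;

  /* The argument lives at the (possibly realigned) old pointer.  */
  return (void *) p;
}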
4297 /* Build an indirect-ref expression over the given TREE, which represents a
4298 piece of a va_arg() expansion. */
4299 tree
4300 build_va_arg_indirect_ref (tree addr)
4302 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
4304 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4305 mf_mark (addr);
4307 return addr;
4310 /* Return a dummy expression of type TYPE in order to keep going after an
4311 error. */
4313 static tree
4314 dummy_object (tree type)
4316 tree t = build_int_cst (build_pointer_type (type), 0);
4317 return build2 (MEM_REF, type, t, t);
4320 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4321 builtin function, but a very special sort of operator. */
4323 enum gimplify_status
4324 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4326 tree promoted_type, have_va_type;
4327 tree valist = TREE_OPERAND (*expr_p, 0);
4328 tree type = TREE_TYPE (*expr_p);
4329 tree t;
4330 location_t loc = EXPR_LOCATION (*expr_p);
4332 /* Verify that valist is of the proper type. */
4333 have_va_type = TREE_TYPE (valist);
4334 if (have_va_type == error_mark_node)
4335 return GS_ERROR;
4336 have_va_type = targetm.canonical_va_list_type (have_va_type);
4338 if (have_va_type == NULL_TREE)
4340 error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
4341 return GS_ERROR;
4344 /* Generate a diagnostic for requesting data of a type that cannot
4345 be passed through `...' due to type promotion at the call site. */
4346 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4347 != type)
4349 static bool gave_help;
4350 bool warned;
4352 /* Unfortunately, this is merely undefined, rather than a constraint
4353 violation, so we cannot make this an error. If this call is never
4354 executed, the program is still strictly conforming. */
4355 warned = warning_at (loc, 0,
4356 "%qT is promoted to %qT when passed through %<...%>",
4357 type, promoted_type);
4358 if (!gave_help && warned)
4360 gave_help = true;
4361 inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
4362 promoted_type, type);
4365 /* We can, however, treat "undefined" any way we please.
4366 Call abort to encourage the user to fix the program. */
4367 if (warned)
4368 inform (loc, "if this code is reached, the program will abort");
4369 /* Before the abort, allow the evaluation of the va_list
4370 expression to exit or longjmp. */
4371 gimplify_and_add (valist, pre_p);
4372 t = build_call_expr_loc (loc,
4373 implicit_built_in_decls[BUILT_IN_TRAP], 0);
4374 gimplify_and_add (t, pre_p);
4376 /* This is dead code, but go ahead and finish so that the
4377 mode of the result comes out right. */
4378 *expr_p = dummy_object (type);
4379 return GS_ALL_DONE;
4381 else
4383 /* Make it easier for the backends by protecting the valist argument
4384 from multiple evaluations. */
4385 if (TREE_CODE (have_va_type) == ARRAY_TYPE)
4387 /* For this case, the backends will be expecting a pointer to
4388 TREE_TYPE (abi), but it's possible we've
4389 actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
4390 So fix it. */
4391 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4393 tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
4394 valist = fold_convert_loc (loc, p1,
4395 build_fold_addr_expr_loc (loc, valist));
4398 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4400 else
4401 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4403 if (!targetm.gimplify_va_arg_expr)
4404 /* FIXME: Once most targets are converted we should merely
4405 assert this is non-null. */
4406 return GS_ALL_DONE;
4408 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4409 return GS_OK;
4413 /* Expand EXP, a call to __builtin_va_end. */
4415 static rtx
4416 expand_builtin_va_end (tree exp)
4418 tree valist = CALL_EXPR_ARG (exp, 0);
4420 /* Evaluate for side effects, if needed. I hate macros that don't
4421 do that. */
4422 if (TREE_SIDE_EFFECTS (valist))
4423 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4425 return const0_rtx;
4428 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4429 builtin rather than just as an assignment in stdarg.h because of the
4430 nastiness of array-type va_list types. */
4432 static rtx
4433 expand_builtin_va_copy (tree exp)
4435 tree dst, src, t;
4436 location_t loc = EXPR_LOCATION (exp);
4438 dst = CALL_EXPR_ARG (exp, 0);
4439 src = CALL_EXPR_ARG (exp, 1);
4441 dst = stabilize_va_list_loc (loc, dst, 1);
4442 src = stabilize_va_list_loc (loc, src, 0);
4444 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4446 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4448 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4449 TREE_SIDE_EFFECTS (t) = 1;
4450 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4452 else
4454 rtx dstb, srcb, size;
4456 /* Evaluate to pointers. */
4457 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4458 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4459 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4460 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4462 dstb = convert_memory_address (Pmode, dstb);
4463 srcb = convert_memory_address (Pmode, srcb);
4465 /* "Dereference" to BLKmode memories. */
4466 dstb = gen_rtx_MEM (BLKmode, dstb);
4467 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4468 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4469 srcb = gen_rtx_MEM (BLKmode, srcb);
4470 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4471 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4473 /* Copy. */
4474 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4477 return const0_rtx;
4480 /* Expand a call to one of the builtin functions __builtin_frame_address or
4481 __builtin_return_address. */
4483 static rtx
4484 expand_builtin_frame_address (tree fndecl, tree exp)
4486 /* The argument must be a nonnegative integer constant.
4487 It counts the number of frames to scan up the stack.
4488 The value is the return address saved in that frame. */
4489 if (call_expr_nargs (exp) == 0)
4490 /* Warning about missing arg was already issued. */
4491 return const0_rtx;
4492 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4495 error ("invalid argument to %<__builtin_frame_address%>");
4496 else
4497 error ("invalid argument to %<__builtin_return_address%>");
4498 return const0_rtx;
4500 else
4502 rtx tem
4503 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4504 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
4506 /* Some ports cannot access arbitrary stack frames. */
4507 if (tem == NULL)
4509 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4510 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4511 else
4512 warning (0, "unsupported argument to %<__builtin_return_address%>");
4513 return const0_rtx;
4516 /* For __builtin_frame_address, return what we've got. */
4517 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4518 return tem;
4520 if (!REG_P (tem)
4521 && ! CONSTANT_P (tem))
4522 tem = copy_to_mode_reg (Pmode, tem);
4523 return tem;
4527 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4528 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4529 is the same as for allocate_dynamic_stack_space. */
4531 static rtx
4532 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4534 rtx op0;
4535 rtx result;
4537 /* Emit normal call if marked not-inlineable. */
4538 if (CALL_CANNOT_INLINE_P (exp))
4539 return NULL_RTX;
4541 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4542 return NULL_RTX;
4544 /* Compute the argument. */
4545 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4547 /* Allocate the desired space. */
4548 result = allocate_dynamic_stack_space (op0, 0, BIGGEST_ALIGNMENT,
4549 cannot_accumulate);
4550 result = convert_memory_address (ptr_mode, result);
4552 return result;
4555 /* Expand a call to a bswap builtin with argument ARG0. MODE
4556 is the mode to expand with. */
4558 static rtx
4559 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4561 enum machine_mode mode;
4562 tree arg;
4563 rtx op0;
4565 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4566 return NULL_RTX;
4568 arg = CALL_EXPR_ARG (exp, 0);
4569 mode = TYPE_MODE (TREE_TYPE (arg));
4570 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4572 target = expand_unop (mode, bswap_optab, op0, target, 1);
4574 gcc_assert (target);
4576 return convert_to_mode (mode, target, 0);
4579 /* Expand a call to a unary builtin in EXP.
4580 Return NULL_RTX if a normal call should be emitted rather than expanding the
4581 function in-line. If convenient, the result should be placed in TARGET.
4582 SUBTARGET may be used as the target for computing one of EXP's operands. */
4584 static rtx
4585 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4586 rtx subtarget, optab op_optab)
4588 rtx op0;
4590 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4591 return NULL_RTX;
4593 /* Compute the argument. */
4594 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4595 (subtarget
4596 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4597 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4598 VOIDmode, EXPAND_NORMAL);
4599 /* Compute op, into TARGET if possible.
4600 Set TARGET to wherever the result comes back. */
4601 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4602 op_optab, op0, target, op_optab != clrsb_optab);
4603 gcc_assert (target);
4605 return convert_to_mode (target_mode, target, 0);
4608 /* Expand a call to __builtin_expect. We just return our argument
4609 as the builtin_expect semantic should've been already executed by
4610 the tree branch prediction pass. */
4612 static rtx
4613 expand_builtin_expect (tree exp, rtx target)
4615 tree arg;
4617 if (call_expr_nargs (exp) < 2)
4618 return const0_rtx;
4619 arg = CALL_EXPR_ARG (exp, 0);
4621 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4622 /* When guessing was done, the hints should be already stripped away. */
4623 gcc_assert (!flag_guess_branch_prob
4624 || optimize == 0 || seen_error ());
4625 return target;
4628 /* Expand a call to __builtin_assume_aligned. We just return our first
4629 argument as the builtin_assume_aligned semantic should've been already
4630 executed by CCP. */
4632 static rtx
4633 expand_builtin_assume_aligned (tree exp, rtx target)
4635 if (call_expr_nargs (exp) < 2)
4636 return const0_rtx;
4637 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4638 EXPAND_NORMAL);
4639 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4640 && (call_expr_nargs (exp) < 3
4641 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4642 return target;
4645 void
4646 expand_builtin_trap (void)
4648 #ifdef HAVE_trap
4649 if (HAVE_trap)
4650 emit_insn (gen_trap ());
4651 else
4652 #endif
4653 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4654 emit_barrier ();
4657 /* Expand a call to __builtin_unreachable. We do nothing except emit
4658 a barrier saying that control flow will not pass here.
4660 It is the responsibility of the program being compiled to ensure
4661 that control flow never reaches __builtin_unreachable. */
4662 static void
4663 expand_builtin_unreachable (void)
4665 emit_barrier ();
4668 /* Expand EXP, a call to fabs, fabsf or fabsl.
4669 Return NULL_RTX if a normal call should be emitted rather than expanding
4670 the function inline. If convenient, the result should be placed
4671 in TARGET. SUBTARGET may be used as the target for computing
4672 the operand. */
4674 static rtx
4675 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4677 enum machine_mode mode;
4678 tree arg;
4679 rtx op0;
4681 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4682 return NULL_RTX;
4684 arg = CALL_EXPR_ARG (exp, 0);
4685 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4686 mode = TYPE_MODE (TREE_TYPE (arg));
4687 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4688 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4691 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4692 Return NULL if a normal call should be emitted rather than expanding the
4693 function inline. If convenient, the result should be placed in TARGET.
4694 SUBTARGET may be used as the target for computing the operand. */
4696 static rtx
4697 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4699 rtx op0, op1;
4700 tree arg;
4702 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4703 return NULL_RTX;
4705 arg = CALL_EXPR_ARG (exp, 0);
4706 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4708 arg = CALL_EXPR_ARG (exp, 1);
4709 op1 = expand_normal (arg);
4711 return expand_copysign (op0, op1, target);
4714 /* Create a new constant string literal and return a char* pointer to it.
4715 The STRING_CST value is the LEN characters at STR. */
4716 tree
4717 build_string_literal (int len, const char *str)
4719 tree t, elem, index, type;
4721 t = build_string (len, str);
4722 elem = build_type_variant (char_type_node, 1, 0);
4723 index = build_index_type (size_int (len - 1));
4724 type = build_array_type (elem, index);
4725 TREE_TYPE (t) = type;
4726 TREE_CONSTANT (t) = 1;
4727 TREE_READONLY (t) = 1;
4728 TREE_STATIC (t) = 1;
4730 type = build_pointer_type (elem);
4731 t = build1 (ADDR_EXPR, type,
4732 build4 (ARRAY_REF, elem,
4733 t, integer_zero_node, NULL_TREE, NULL_TREE));
4734 return t;
4737 /* Expand a call to __builtin___clear_cache. */
4739 static rtx
4740 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4742 #ifndef HAVE_clear_cache
4743 #ifdef CLEAR_INSN_CACHE
4744 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4745 does something. Just do the default expansion to a call to
4746 __clear_cache(). */
4747 return NULL_RTX;
4748 #else
4749 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4750 does nothing. There is no need to call it. Do nothing. */
4751 return const0_rtx;
4752 #endif /* CLEAR_INSN_CACHE */
4753 #else
4754 /* We have a "clear_cache" insn, and it will handle everything. */
4755 tree begin, end;
4756 rtx begin_rtx, end_rtx;
4758 /* We must not expand to a library call. If we did, any
4759 fallback library function in libgcc that might contain a call to
4760 __builtin___clear_cache() would recurse infinitely. */
4761 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4763 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4764 return const0_rtx;
4767 if (HAVE_clear_cache)
4769 struct expand_operand ops[2];
4771 begin = CALL_EXPR_ARG (exp, 0);
4772 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4774 end = CALL_EXPR_ARG (exp, 1);
4775 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4777 create_address_operand (&ops[0], begin_rtx);
4778 create_address_operand (&ops[1], end_rtx);
4779 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4780 return const0_rtx;
4782 return const0_rtx;
4783 #endif /* HAVE_clear_cache */
4786 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4788 static rtx
4789 round_trampoline_addr (rtx tramp)
4791 rtx temp, addend, mask;
4793 /* If we don't need too much alignment, we'll have been guaranteed
4794 proper alignment by get_trampoline_type. */
4795 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4796 return tramp;
4798 /* Round address up to desired boundary. */
4799 temp = gen_reg_rtx (Pmode);
4800 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
4801 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
4803 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4804 temp, 0, OPTAB_LIB_WIDEN);
4805 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4806 temp, 0, OPTAB_LIB_WIDEN);
4808 return tramp;
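/* Illustrative sketch, not part of GCC: the rounding computed above,
   (TRAMP + ALIGN - 1) & -ALIGN, written for a plain pointer with a
   power-of-two ALIGN (uses uintptr_t from <stdint.h>).  The name
   ref_round_up_addr is made up for this example.  */
static void *
ref_round_up_addr (void *p, uintptr_t align)
{
  uintptr_t a = (uintptr_t) p;
  return (void *) ((a + align - 1) & ~(align - 1));
}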
4811 static rtx
4812 expand_builtin_init_trampoline (tree exp)
4814 tree t_tramp, t_func, t_chain;
4815 rtx m_tramp, r_tramp, r_chain, tmp;
4817 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4818 POINTER_TYPE, VOID_TYPE))
4819 return NULL_RTX;
4821 t_tramp = CALL_EXPR_ARG (exp, 0);
4822 t_func = CALL_EXPR_ARG (exp, 1);
4823 t_chain = CALL_EXPR_ARG (exp, 2);
4825 r_tramp = expand_normal (t_tramp);
4826 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4827 MEM_NOTRAP_P (m_tramp) = 1;
4829 /* The TRAMP argument should be the address of a field within the
4830 local function's FRAME decl. Let's see if we can fill in the
4831 MEM_ATTRs for this memory. */
4832 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4833 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
4834 true, 0);
4836 tmp = round_trampoline_addr (r_tramp);
4837 if (tmp != r_tramp)
4839 m_tramp = change_address (m_tramp, BLKmode, tmp);
4840 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4841 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
4844 /* The FUNC argument should be the address of the nested function.
4845 Extract the actual function decl to pass to the hook. */
4846 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4847 t_func = TREE_OPERAND (t_func, 0);
4848 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4850 r_chain = expand_normal (t_chain);
4852 /* Generate insns to initialize the trampoline. */
4853 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4855 trampolines_created = 1;
4857 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4858 "trampoline generated for nested function %qD", t_func);
4860 return const0_rtx;
4863 static rtx
4864 expand_builtin_adjust_trampoline (tree exp)
4866 rtx tramp;
4868 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4869 return NULL_RTX;
4871 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4872 tramp = round_trampoline_addr (tramp);
4873 if (targetm.calls.trampoline_adjust_address)
4874 tramp = targetm.calls.trampoline_adjust_address (tramp);
4876 return tramp;
4879 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4880 function. The function first checks whether the back end provides
4881 an insn to implement signbit for the respective mode. If not, it
4882 checks whether the floating point format of the value is such that
4883 the sign bit can be extracted. If that is not the case, the
4884 function returns NULL_RTX to indicate that a normal call should be
4885 emitted rather than expanding the function in-line. EXP is the
4886 expression that is a call to the builtin function; if convenient,
4887 the result should be placed in TARGET. */
4888 static rtx
4889 expand_builtin_signbit (tree exp, rtx target)
4891 const struct real_format *fmt;
4892 enum machine_mode fmode, imode, rmode;
4893 tree arg;
4894 int word, bitpos;
4895 enum insn_code icode;
4896 rtx temp;
4897 location_t loc = EXPR_LOCATION (exp);
4899 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4900 return NULL_RTX;
4902 arg = CALL_EXPR_ARG (exp, 0);
4903 fmode = TYPE_MODE (TREE_TYPE (arg));
4904 rmode = TYPE_MODE (TREE_TYPE (exp));
4905 fmt = REAL_MODE_FORMAT (fmode);
4907 arg = builtin_save_expr (arg);
4909 /* Expand the argument yielding a RTX expression. */
4910 temp = expand_normal (arg);
4912 /* Check if the back end provides an insn that handles signbit for the
4913 argument's mode. */
4914 icode = optab_handler (signbit_optab, fmode);
4915 if (icode != CODE_FOR_nothing)
4917 rtx last = get_last_insn ();
4918 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4919 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4920 return target;
4921 delete_insns_since (last);
4924 /* For floating point formats without a sign bit, implement signbit
4925 as "ARG < 0.0". */
4926 bitpos = fmt->signbit_ro;
4927 if (bitpos < 0)
4929 /* But we can't do this if the format supports signed zero. */
4930 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4931 return NULL_RTX;
4933 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4934 build_real (TREE_TYPE (arg), dconst0));
4935 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4938 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4940 imode = int_mode_for_mode (fmode);
4941 if (imode == BLKmode)
4942 return NULL_RTX;
4943 temp = gen_lowpart (imode, temp);
4945 else
4947 imode = word_mode;
4948 /* Handle targets with different FP word orders. */
4949 if (FLOAT_WORDS_BIG_ENDIAN)
4950 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4951 else
4952 word = bitpos / BITS_PER_WORD;
4953 temp = operand_subword_force (temp, word, fmode);
4954 bitpos = bitpos % BITS_PER_WORD;
4957 /* Force the intermediate word_mode (or narrower) result into a
4958 register. This avoids attempting to create paradoxical SUBREGs
4959 of floating point modes below. */
4960 temp = force_reg (imode, temp);
4962 /* If the bitpos is within the "result mode" lowpart, the operation
4963 can be implemented with a single bitwise AND. Otherwise, we need
4964 a right shift and an AND. */
4966 if (bitpos < GET_MODE_BITSIZE (rmode))
4968 double_int mask = double_int_setbit (double_int_zero, bitpos);
4970 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4971 temp = gen_lowpart (rmode, temp);
4972 temp = expand_binop (rmode, and_optab, temp,
4973 immed_double_int_const (mask, rmode),
4974 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4976 else
4978 /* Perform a logical right shift to place the signbit in the least
4979 significant bit, then truncate the result to the desired mode
4980 and mask just this bit. */
4981 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4982 temp = gen_lowpart (rmode, temp);
4983 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4984 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4987 return temp;
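/* Illustrative sketch, not part of GCC: the bit test the RTL above boils
   down to for IEEE single precision, where the sign occupies the most
   significant of 32 bits.  Assumes float and unsigned int are both 32
   bits wide; the name ref_signbitf is made up for this example.  */
static int
ref_signbitf (float x)
{
  unsigned int image;

  /* Copy the float's bit image into an integer and test bit 31.  */
  memcpy (&image, &x, sizeof image);
  return (image >> 31) & 1;
}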
4990 /* Expand fork or exec calls. TARGET is the desired target of the
4991 call. EXP is the call. FN is the
4992 identifier of the actual function. IGNORE is nonzero if the
4993 value is to be ignored. */
4995 static rtx
4996 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4998 tree id, decl;
4999 tree call;
5001 /* If we are not profiling, just call the function. */
5002 if (!profile_arc_flag)
5003 return NULL_RTX;
5005 /* Otherwise call the wrapper. This should be equivalent for the rest of
5006 the compiler, so the code does not diverge, and the wrapper may run the
5007 code necessary for keeping the profiling sane. */
5009 switch (DECL_FUNCTION_CODE (fn))
5011 case BUILT_IN_FORK:
5012 id = get_identifier ("__gcov_fork");
5013 break;
5015 case BUILT_IN_EXECL:
5016 id = get_identifier ("__gcov_execl");
5017 break;
5019 case BUILT_IN_EXECV:
5020 id = get_identifier ("__gcov_execv");
5021 break;
5023 case BUILT_IN_EXECLP:
5024 id = get_identifier ("__gcov_execlp");
5025 break;
5027 case BUILT_IN_EXECLE:
5028 id = get_identifier ("__gcov_execle");
5029 break;
5031 case BUILT_IN_EXECVP:
5032 id = get_identifier ("__gcov_execvp");
5033 break;
5035 case BUILT_IN_EXECVE:
5036 id = get_identifier ("__gcov_execve");
5037 break;
5039 default:
5040 gcc_unreachable ();
5043 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5044 FUNCTION_DECL, id, TREE_TYPE (fn));
5045 DECL_EXTERNAL (decl) = 1;
5046 TREE_PUBLIC (decl) = 1;
5047 DECL_ARTIFICIAL (decl) = 1;
5048 TREE_NOTHROW (decl) = 1;
5049 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5050 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5051 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5052 return expand_call (call, target, ignore);
5057 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5058 the pointer in these functions is void*, the tree optimizers may remove
5059 casts. The mode computed in expand_builtin isn't reliable either, due
5060 to __sync_bool_compare_and_swap.
5062 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5063 group of builtins. This gives us log2 of the mode size. */
5065 static inline enum machine_mode
5066 get_builtin_sync_mode (int fcode_diff)
5068 /* The size is not negotiable, so ask not to get BLKmode in return
5069 if the target indicates that a smaller size would be better. */
5070 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
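/* Illustrative sketch, not part of GCC: the width recovered above.  For
   the __sync_*_N builtins, FCODE_DIFF is 0, 1, 2, 3 or 4 for the _1, _2,
   _4, _8 and _16 variants, so shifting BITS_PER_UNIT (8 on typical
   targets) left by FCODE_DIFF yields the access width in bits.  The name
   ref_sync_width_bits is made up for this example.  */
static unsigned int
ref_sync_width_bits (int fcode_diff)
{
  return 8u << fcode_diff;   /* 8, 16, 32, 64 or 128 bits.  */
}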
5073 /* Expand the memory expression LOC and return the appropriate memory operand
5074 for the builtin_sync operations. */
5076 static rtx
5077 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5079 rtx addr, mem;
5081 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5082 addr = convert_memory_address (Pmode, addr);
5084 /* Note that we explicitly do not want any alias information for this
5085 memory, so that we kill all other live memories. Otherwise we don't
5086 satisfy the full barrier semantics of the intrinsic. */
5087 mem = validize_mem (gen_rtx_MEM (mode, addr));
5089 /* The alignment needs to be at least that of the mode. */
5090 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5091 get_pointer_alignment (loc, BIGGEST_ALIGNMENT)));
5092 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5093 MEM_VOLATILE_P (mem) = 1;
5095 return mem;
5098 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5099 EXP is the CALL_EXPR. CODE is the rtx code
5100 that corresponds to the arithmetic or logical operation from the name;
5101 an exception here is that NOT actually means NAND. TARGET is an optional
5102 place for us to store the results; AFTER is true if this is the
5103 fetch_and_xxx form. IGNORE is true if we don't actually care about
5104 the result of the operation at all. */
5106 static rtx
5107 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5108 enum rtx_code code, bool after,
5109 rtx target, bool ignore)
5111 rtx val, mem;
5112 enum machine_mode old_mode;
5113 location_t loc = EXPR_LOCATION (exp);
5115 if (code == NOT && warn_sync_nand)
5117 tree fndecl = get_callee_fndecl (exp);
5118 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5120 static bool warned_f_a_n, warned_n_a_f;
5122 switch (fcode)
5124 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5125 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5126 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5127 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5128 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5130 if (warned_f_a_n)
5131 break;
5133 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_FETCH_AND_NAND_N];
5134 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5135 warned_f_a_n = true;
5136 break;
5138 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5139 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5140 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5141 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5142 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5144 if (warned_n_a_f)
5145 break;
5147 fndecl = implicit_built_in_decls[BUILT_IN_SYNC_NAND_AND_FETCH_N];
5148 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5149 warned_n_a_f = true;
5150 break;
5152 default:
5153 gcc_unreachable ();
5157 /* Expand the operands. */
5158 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5160 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5161 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5162 of CONST_INTs, where we know the old_mode only from the call argument. */
5163 old_mode = GET_MODE (val);
5164 if (old_mode == VOIDmode)
5165 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5166 val = convert_modes (mode, old_mode, val, 1);
5168 if (ignore)
5169 return expand_sync_operation (mem, val, code);
5170 else
5171 return expand_sync_fetch_operation (mem, val, code, after, target);
5174 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5175 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5176 true if this is the boolean form. TARGET is a place for us to store the
5177 results; this is NOT optional if IS_BOOL is true. */
5179 static rtx
5180 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5181 bool is_bool, rtx target)
5183 rtx old_val, new_val, mem;
5184 enum machine_mode old_mode;
5186 /* Expand the operands. */
5187 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5190 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5191 mode, EXPAND_NORMAL);
5192 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5193 of CONST_INTs, where we know the old_mode only from the call argument. */
5194 old_mode = GET_MODE (old_val);
5195 if (old_mode == VOIDmode)
5196 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5197 old_val = convert_modes (mode, old_mode, old_val, 1);
5199 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5200 mode, EXPAND_NORMAL);
5201 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5202 of CONST_INTs, where we know the old_mode only from the call argument. */
5203 old_mode = GET_MODE (new_val);
5204 if (old_mode == VOIDmode)
5205 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5206 new_val = convert_modes (mode, old_mode, new_val, 1);
5208 if (is_bool)
5209 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5210 else
5211 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5214 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5215 general form is actually an atomic exchange, and some targets only
5216 support a reduced form with the second argument being a constant 1.
5217 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5218 the results. */
5220 static rtx
5221 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5222 rtx target)
5224 rtx val, mem;
5225 enum machine_mode old_mode;
5227 /* Expand the operands. */
5228 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5229 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5230 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5231 of CONST_INTs, where we know the old_mode only from the call argument. */
5232 old_mode = GET_MODE (val);
5233 if (old_mode == VOIDmode)
5234 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5235 val = convert_modes (mode, old_mode, val, 1);
5237 return expand_sync_lock_test_and_set (mem, val, target);
5240 /* Expand the __sync_synchronize intrinsic. */
5242 static void
5243 expand_builtin_sync_synchronize (void)
5245 gimple x;
5246 VEC (tree, gc) *v_clobbers;
5248 #ifdef HAVE_memory_barrier
5249 if (HAVE_memory_barrier)
5251 emit_insn (gen_memory_barrier ());
5252 return;
5254 #endif
5256 if (synchronize_libfunc != NULL_RTX)
5258 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5259 return;
5262 /* If no explicit memory barrier instruction is available, create an
5263 empty asm stmt with a memory clobber. */
5264 v_clobbers = VEC_alloc (tree, gc, 1);
5265 VEC_quick_push (tree, v_clobbers,
5266 tree_cons (NULL, build_string (6, "memory"), NULL));
5267 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5268 gimple_asm_set_volatile (x, true);
5269 expand_asm_stmt (x);
5272 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5274 static void
5275 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5277 struct expand_operand ops[2];
5278 enum insn_code icode;
5279 rtx mem;
5281 /* Expand the operands. */
5282 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5284 /* If there is an explicit operation in the md file, use it. */
5285 icode = direct_optab_handler (sync_lock_release_optab, mode);
5286 if (icode != CODE_FOR_nothing)
5288 create_fixed_operand (&ops[0], mem);
5289 create_input_operand (&ops[1], const0_rtx, mode);
5290 if (maybe_expand_insn (icode, 2, ops))
5291 return;
5294 /* Otherwise we can implement this operation by emitting a barrier
5295 followed by a store of zero. */
5296 expand_builtin_sync_synchronize ();
5297 emit_move_insn (mem, const0_rtx);
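/* A minimal spinlock sketch (hypothetical user code) showing how the
   test_and_set and lock_release builtins handled above are typically
   paired: acquire by atomically storing 1, release by storing 0 behind
   a barrier, much as the fallback expansion above does.

     static volatile int lock;

     static void
     spin_lock (void)
     {
       while (__sync_lock_test_and_set (&lock, 1))
         while (lock)
           ;
     }

     static void
     spin_unlock (void)
     {
       __sync_lock_release (&lock);
     }
*/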
5300 /* Expand an expression EXP that calls a built-in function,
5301 with result going to TARGET if that's convenient
5302 (and in mode MODE if that's convenient).
5303 SUBTARGET may be used as the target for computing one of EXP's operands.
5304 IGNORE is nonzero if the value is to be ignored. */
5307 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5308 int ignore)
5310 tree fndecl = get_callee_fndecl (exp);
5311 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5312 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5313 int flags;
5315 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5316 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5318 /* When not optimizing, generate calls to library functions for a certain
5319 set of builtins. */
5320 if (!optimize
5321 && !called_as_built_in (fndecl)
5322 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5323 && fcode != BUILT_IN_ALLOCA
5324 && fcode != BUILT_IN_FREE)
5325 return expand_call (exp, target, ignore);
5327 /* The built-in function expanders test for target == const0_rtx
5328 to determine whether the function's result will be ignored. */
5329 if (ignore)
5330 target = const0_rtx;
5332 /* If the result of a pure or const built-in function is ignored, and
5333 none of its arguments are volatile, we can avoid expanding the
5334 built-in call and just evaluate the arguments for side-effects. */
5335 if (target == const0_rtx
5336 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5337 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5339 bool volatilep = false;
5340 tree arg;
5341 call_expr_arg_iterator iter;
5343 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5344 if (TREE_THIS_VOLATILE (arg))
5346 volatilep = true;
5347 break;
5350 if (! volatilep)
5352 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5353 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5354 return const0_rtx;
5358 switch (fcode)
5360 CASE_FLT_FN (BUILT_IN_FABS):
5361 target = expand_builtin_fabs (exp, target, subtarget);
5362 if (target)
5363 return target;
5364 break;
5366 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5367 target = expand_builtin_copysign (exp, target, subtarget);
5368 if (target)
5369 return target;
5370 break;
5372 /* Just do a normal library call if we were unable to fold
5373 the values. */
5374 CASE_FLT_FN (BUILT_IN_CABS):
5375 break;
5377 CASE_FLT_FN (BUILT_IN_EXP):
5378 CASE_FLT_FN (BUILT_IN_EXP10):
5379 CASE_FLT_FN (BUILT_IN_POW10):
5380 CASE_FLT_FN (BUILT_IN_EXP2):
5381 CASE_FLT_FN (BUILT_IN_EXPM1):
5382 CASE_FLT_FN (BUILT_IN_LOGB):
5383 CASE_FLT_FN (BUILT_IN_LOG):
5384 CASE_FLT_FN (BUILT_IN_LOG10):
5385 CASE_FLT_FN (BUILT_IN_LOG2):
5386 CASE_FLT_FN (BUILT_IN_LOG1P):
5387 CASE_FLT_FN (BUILT_IN_TAN):
5388 CASE_FLT_FN (BUILT_IN_ASIN):
5389 CASE_FLT_FN (BUILT_IN_ACOS):
5390 CASE_FLT_FN (BUILT_IN_ATAN):
5391 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5392 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5393 because of possible accuracy problems. */
5394 if (! flag_unsafe_math_optimizations)
5395 break;
5396 CASE_FLT_FN (BUILT_IN_SQRT):
5397 CASE_FLT_FN (BUILT_IN_FLOOR):
5398 CASE_FLT_FN (BUILT_IN_CEIL):
5399 CASE_FLT_FN (BUILT_IN_TRUNC):
5400 CASE_FLT_FN (BUILT_IN_ROUND):
5401 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5402 CASE_FLT_FN (BUILT_IN_RINT):
5403 target = expand_builtin_mathfn (exp, target, subtarget);
5404 if (target)
5405 return target;
5406 break;
5408 CASE_FLT_FN (BUILT_IN_FMA):
5409 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5410 if (target)
5411 return target;
5412 break;
5414 CASE_FLT_FN (BUILT_IN_ILOGB):
5415 if (! flag_unsafe_math_optimizations)
5416 break;
5417 CASE_FLT_FN (BUILT_IN_ISINF):
5418 CASE_FLT_FN (BUILT_IN_FINITE):
5419 case BUILT_IN_ISFINITE:
5420 case BUILT_IN_ISNORMAL:
5421 target = expand_builtin_interclass_mathfn (exp, target);
5422 if (target)
5423 return target;
5424 break;
5426 CASE_FLT_FN (BUILT_IN_LCEIL):
5427 CASE_FLT_FN (BUILT_IN_LLCEIL):
5428 CASE_FLT_FN (BUILT_IN_LFLOOR):
5429 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5430 target = expand_builtin_int_roundingfn (exp, target);
5431 if (target)
5432 return target;
5433 break;
5435 CASE_FLT_FN (BUILT_IN_LRINT):
5436 CASE_FLT_FN (BUILT_IN_LLRINT):
5437 CASE_FLT_FN (BUILT_IN_LROUND):
5438 CASE_FLT_FN (BUILT_IN_LLROUND):
5439 target = expand_builtin_int_roundingfn_2 (exp, target);
5440 if (target)
5441 return target;
5442 break;
5444 CASE_FLT_FN (BUILT_IN_POWI):
5445 target = expand_builtin_powi (exp, target);
5446 if (target)
5447 return target;
5448 break;
5450 CASE_FLT_FN (BUILT_IN_ATAN2):
5451 CASE_FLT_FN (BUILT_IN_LDEXP):
5452 CASE_FLT_FN (BUILT_IN_SCALB):
5453 CASE_FLT_FN (BUILT_IN_SCALBN):
5454 CASE_FLT_FN (BUILT_IN_SCALBLN):
5455 if (! flag_unsafe_math_optimizations)
5456 break;
5458 CASE_FLT_FN (BUILT_IN_FMOD):
5459 CASE_FLT_FN (BUILT_IN_REMAINDER):
5460 CASE_FLT_FN (BUILT_IN_DREM):
5461 CASE_FLT_FN (BUILT_IN_POW):
5462 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5463 if (target)
5464 return target;
5465 break;
5467 CASE_FLT_FN (BUILT_IN_CEXPI):
5468 target = expand_builtin_cexpi (exp, target);
5469 gcc_assert (target);
5470 return target;
5472 CASE_FLT_FN (BUILT_IN_SIN):
5473 CASE_FLT_FN (BUILT_IN_COS):
5474 if (! flag_unsafe_math_optimizations)
5475 break;
5476 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5477 if (target)
5478 return target;
5479 break;
5481 CASE_FLT_FN (BUILT_IN_SINCOS):
5482 if (! flag_unsafe_math_optimizations)
5483 break;
5484 target = expand_builtin_sincos (exp);
5485 if (target)
5486 return target;
5487 break;
5489 case BUILT_IN_APPLY_ARGS:
5490 return expand_builtin_apply_args ();
5492 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5493 FUNCTION with a copy of the parameters described by
5494 ARGUMENTS, and ARGSIZE. It returns a block of memory
5495 allocated on the stack into which are stored all the registers

5496 that might possibly be used for returning the result of a
5497 function. ARGUMENTS is the value returned by
5498 __builtin_apply_args. ARGSIZE is the number of bytes of
5499 arguments that must be copied. ??? How should this value be
5500 computed? We'll also need a safe worst case value for varargs
5501 functions. */
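/* A sketch of the intended usage pattern (hypothetical user code; the
   64-byte argument-block size is an arbitrary "large enough" guess,
   since the ??? note above admits there is no reliable way to compute
   it):

     int target_function (int, double);

     int
     wrapper (int i, double d)
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_function,
                                       args, 64);
       __builtin_return (result);
     }
*/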
5502 case BUILT_IN_APPLY:
5503 if (!validate_arglist (exp, POINTER_TYPE,
5504 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5505 && !validate_arglist (exp, REFERENCE_TYPE,
5506 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5507 return const0_rtx;
5508 else
5510 rtx ops[3];
5512 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5513 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5514 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5516 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5519 /* __builtin_return (RESULT) causes the function to return the
5520 value described by RESULT. RESULT is address of the block of
5521 memory returned by __builtin_apply. */
5522 case BUILT_IN_RETURN:
5523 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5524 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5525 return const0_rtx;
5527 case BUILT_IN_SAVEREGS:
5528 return expand_builtin_saveregs ();
5530 case BUILT_IN_VA_ARG_PACK:
5531 /* All valid uses of __builtin_va_arg_pack () are removed during
5532 inlining. */
5533 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5534 return const0_rtx;
5536 case BUILT_IN_VA_ARG_PACK_LEN:
5537 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5538 inlining. */
5539 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5540 return const0_rtx;
5542 /* Return the address of the first anonymous stack arg. */
5543 case BUILT_IN_NEXT_ARG:
5544 if (fold_builtin_next_arg (exp, false))
5545 return const0_rtx;
5546 return expand_builtin_next_arg ();
5548 case BUILT_IN_CLEAR_CACHE:
5549 target = expand_builtin___clear_cache (exp);
5550 if (target)
5551 return target;
5552 break;
5554 case BUILT_IN_CLASSIFY_TYPE:
5555 return expand_builtin_classify_type (exp);
5557 case BUILT_IN_CONSTANT_P:
5558 return const0_rtx;
5560 case BUILT_IN_FRAME_ADDRESS:
5561 case BUILT_IN_RETURN_ADDRESS:
5562 return expand_builtin_frame_address (fndecl, exp);
5564 /* Returns the address of the area where the structure is returned.
5565 0 otherwise. */
5566 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5567 if (call_expr_nargs (exp) != 0
5568 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5569 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5570 return const0_rtx;
5571 else
5572 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5574 case BUILT_IN_ALLOCA:
5575 /* If the allocation stems from the declaration of a variable-sized
5576 object, it cannot accumulate. */
5577 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5578 if (target)
5579 return target;
5580 break;
5582 case BUILT_IN_STACK_SAVE:
5583 return expand_stack_save ();
5585 case BUILT_IN_STACK_RESTORE:
5586 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5587 return const0_rtx;
5589 case BUILT_IN_BSWAP32:
5590 case BUILT_IN_BSWAP64:
5591 target = expand_builtin_bswap (exp, target, subtarget);
5593 if (target)
5594 return target;
5595 break;
5597 CASE_INT_FN (BUILT_IN_FFS):
5598 case BUILT_IN_FFSIMAX:
5599 target = expand_builtin_unop (target_mode, exp, target,
5600 subtarget, ffs_optab);
5601 if (target)
5602 return target;
5603 break;
5605 CASE_INT_FN (BUILT_IN_CLZ):
5606 case BUILT_IN_CLZIMAX:
5607 target = expand_builtin_unop (target_mode, exp, target,
5608 subtarget, clz_optab);
5609 if (target)
5610 return target;
5611 break;
5613 CASE_INT_FN (BUILT_IN_CTZ):
5614 case BUILT_IN_CTZIMAX:
5615 target = expand_builtin_unop (target_mode, exp, target,
5616 subtarget, ctz_optab);
5617 if (target)
5618 return target;
5619 break;
5621 CASE_INT_FN (BUILT_IN_CLRSB):
5622 case BUILT_IN_CLRSBIMAX:
5623 target = expand_builtin_unop (target_mode, exp, target,
5624 subtarget, clrsb_optab);
5625 if (target)
5626 return target;
5627 break;
5629 CASE_INT_FN (BUILT_IN_POPCOUNT):
5630 case BUILT_IN_POPCOUNTIMAX:
5631 target = expand_builtin_unop (target_mode, exp, target,
5632 subtarget, popcount_optab);
5633 if (target)
5634 return target;
5635 break;
5637 CASE_INT_FN (BUILT_IN_PARITY):
5638 case BUILT_IN_PARITYIMAX:
5639 target = expand_builtin_unop (target_mode, exp, target,
5640 subtarget, parity_optab);
5641 if (target)
5642 return target;
5643 break;
5645 case BUILT_IN_STRLEN:
5646 target = expand_builtin_strlen (exp, target, target_mode);
5647 if (target)
5648 return target;
5649 break;
5651 case BUILT_IN_STRCPY:
5652 target = expand_builtin_strcpy (exp, target);
5653 if (target)
5654 return target;
5655 break;
5657 case BUILT_IN_STRNCPY:
5658 target = expand_builtin_strncpy (exp, target);
5659 if (target)
5660 return target;
5661 break;
5663 case BUILT_IN_STPCPY:
5664 target = expand_builtin_stpcpy (exp, target, mode);
5665 if (target)
5666 return target;
5667 break;
5669 case BUILT_IN_MEMCPY:
5670 target = expand_builtin_memcpy (exp, target);
5671 if (target)
5672 return target;
5673 break;
5675 case BUILT_IN_MEMPCPY:
5676 target = expand_builtin_mempcpy (exp, target, mode);
5677 if (target)
5678 return target;
5679 break;
5681 case BUILT_IN_MEMSET:
5682 target = expand_builtin_memset (exp, target, mode);
5683 if (target)
5684 return target;
5685 break;
5687 case BUILT_IN_BZERO:
5688 target = expand_builtin_bzero (exp);
5689 if (target)
5690 return target;
5691 break;
5693 case BUILT_IN_STRCMP:
5694 target = expand_builtin_strcmp (exp, target);
5695 if (target)
5696 return target;
5697 break;
5699 case BUILT_IN_STRNCMP:
5700 target = expand_builtin_strncmp (exp, target, mode);
5701 if (target)
5702 return target;
5703 break;
5705 case BUILT_IN_BCMP:
5706 case BUILT_IN_MEMCMP:
5707 target = expand_builtin_memcmp (exp, target, mode);
5708 if (target)
5709 return target;
5710 break;
5712 case BUILT_IN_SETJMP:
5713 /* This should have been lowered to the builtins below. */
5714 gcc_unreachable ();
5716 case BUILT_IN_SETJMP_SETUP:
5717 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5718 and the receiver label. */
5719 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5721 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5722 VOIDmode, EXPAND_NORMAL);
5723 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5724 rtx label_r = label_rtx (label);
5726 /* This is copied from the handling of non-local gotos. */
5727 expand_builtin_setjmp_setup (buf_addr, label_r);
5728 nonlocal_goto_handler_labels
5729 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
5730 nonlocal_goto_handler_labels);
5731 /* ??? Do not let expand_label treat us as such since we would
5732 not want to be both on the list of non-local labels and on
5733 the list of forced labels. */
5734 FORCED_LABEL (label) = 0;
5735 return const0_rtx;
5737 break;
5739 case BUILT_IN_SETJMP_DISPATCHER:
5740 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
5741 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5743 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5744 rtx label_r = label_rtx (label);
5746 /* Remove the dispatcher label from the list of non-local labels
5747 since the receiver labels have been added to it above. */
5748 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
5749 return const0_rtx;
5751 break;
5753 case BUILT_IN_SETJMP_RECEIVER:
5754 /* __builtin_setjmp_receiver is passed the receiver label. */
5755 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5757 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
5758 rtx label_r = label_rtx (label);
5760 expand_builtin_setjmp_receiver (label_r);
5761 return const0_rtx;
5763 break;
5765 /* __builtin_longjmp is passed a pointer to an array of five words.
5766 It's similar to the C library longjmp function but works with
5767 __builtin_setjmp above. */
5768 case BUILT_IN_LONGJMP:
5769 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5771 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5772 VOIDmode, EXPAND_NORMAL);
5773 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
5775 if (value != const1_rtx)
5777 error ("%<__builtin_longjmp%> second argument must be 1");
5778 return const0_rtx;
5781 expand_builtin_longjmp (buf_addr, value);
5782 return const0_rtx;
5784 break;
5786 case BUILT_IN_NONLOCAL_GOTO:
5787 target = expand_builtin_nonlocal_goto (exp);
5788 if (target)
5789 return target;
5790 break;
5792 /* This updates the setjmp buffer that is its argument with the value
5793 of the current stack pointer. */
5794 case BUILT_IN_UPDATE_SETJMP_BUF:
5795 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5797 rtx buf_addr
5798 = expand_normal (CALL_EXPR_ARG (exp, 0));
5800 expand_builtin_update_setjmp_buf (buf_addr);
5801 return const0_rtx;
5803 break;
5805 case BUILT_IN_TRAP:
5806 expand_builtin_trap ();
5807 return const0_rtx;
5809 case BUILT_IN_UNREACHABLE:
5810 expand_builtin_unreachable ();
5811 return const0_rtx;
5813 CASE_FLT_FN (BUILT_IN_SIGNBIT):
5814 case BUILT_IN_SIGNBITD32:
5815 case BUILT_IN_SIGNBITD64:
5816 case BUILT_IN_SIGNBITD128:
5817 target = expand_builtin_signbit (exp, target);
5818 if (target)
5819 return target;
5820 break;
5822 /* Various hooks for the DWARF 2 __throw routine. */
5823 case BUILT_IN_UNWIND_INIT:
5824 expand_builtin_unwind_init ();
5825 return const0_rtx;
5826 case BUILT_IN_DWARF_CFA:
5827 return virtual_cfa_rtx;
5828 #ifdef DWARF2_UNWIND_INFO
5829 case BUILT_IN_DWARF_SP_COLUMN:
5830 return expand_builtin_dwarf_sp_column ();
5831 case BUILT_IN_INIT_DWARF_REG_SIZES:
5832 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
5833 return const0_rtx;
5834 #endif
5835 case BUILT_IN_FROB_RETURN_ADDR:
5836 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
5837 case BUILT_IN_EXTRACT_RETURN_ADDR:
5838 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
5839 case BUILT_IN_EH_RETURN:
5840 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
5841 CALL_EXPR_ARG (exp, 1));
5842 return const0_rtx;
5843 #ifdef EH_RETURN_DATA_REGNO
5844 case BUILT_IN_EH_RETURN_DATA_REGNO:
5845 return expand_builtin_eh_return_data_regno (exp);
5846 #endif
5847 case BUILT_IN_EXTEND_POINTER:
5848 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
5849 case BUILT_IN_EH_POINTER:
5850 return expand_builtin_eh_pointer (exp);
5851 case BUILT_IN_EH_FILTER:
5852 return expand_builtin_eh_filter (exp);
5853 case BUILT_IN_EH_COPY_VALUES:
5854 return expand_builtin_eh_copy_values (exp);
5856 case BUILT_IN_VA_START:
5857 return expand_builtin_va_start (exp);
5858 case BUILT_IN_VA_END:
5859 return expand_builtin_va_end (exp);
5860 case BUILT_IN_VA_COPY:
5861 return expand_builtin_va_copy (exp);
5862 case BUILT_IN_EXPECT:
5863 return expand_builtin_expect (exp, target);
5864 case BUILT_IN_ASSUME_ALIGNED:
5865 return expand_builtin_assume_aligned (exp, target);
5866 case BUILT_IN_PREFETCH:
5867 expand_builtin_prefetch (exp);
5868 return const0_rtx;
5870 case BUILT_IN_INIT_TRAMPOLINE:
5871 return expand_builtin_init_trampoline (exp);
5872 case BUILT_IN_ADJUST_TRAMPOLINE:
5873 return expand_builtin_adjust_trampoline (exp);
5875 case BUILT_IN_FORK:
5876 case BUILT_IN_EXECL:
5877 case BUILT_IN_EXECV:
5878 case BUILT_IN_EXECLP:
5879 case BUILT_IN_EXECLE:
5880 case BUILT_IN_EXECVP:
5881 case BUILT_IN_EXECVE:
5882 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
5883 if (target)
5884 return target;
5885 break;
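/* The next two groups of cases differ only in the AFTER argument
   passed to expand_builtin_sync_operation: the fetch_and_OP forms
   return the value the memory held before the operation, the
   OP_and_fetch forms return the value it holds afterwards.  For
   example (hypothetical user code, COUNTER starting at zero, so OLD
   ends up 0 and NOW ends up 10):

     static int counter;

     int old = __sync_fetch_and_add (&counter, 5);
     int now = __sync_add_and_fetch (&counter, 5);
*/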
5887 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
5888 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
5889 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
5890 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
5891 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
5892 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
5893 target = expand_builtin_sync_operation (mode, exp, PLUS,
5894 false, target, ignore);
5895 if (target)
5896 return target;
5897 break;
5899 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
5900 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
5901 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
5902 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
5903 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
5904 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
5905 target = expand_builtin_sync_operation (mode, exp, MINUS,
5906 false, target, ignore);
5907 if (target)
5908 return target;
5909 break;
5911 case BUILT_IN_SYNC_FETCH_AND_OR_1:
5912 case BUILT_IN_SYNC_FETCH_AND_OR_2:
5913 case BUILT_IN_SYNC_FETCH_AND_OR_4:
5914 case BUILT_IN_SYNC_FETCH_AND_OR_8:
5915 case BUILT_IN_SYNC_FETCH_AND_OR_16:
5916 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
5917 target = expand_builtin_sync_operation (mode, exp, IOR,
5918 false, target, ignore);
5919 if (target)
5920 return target;
5921 break;
5923 case BUILT_IN_SYNC_FETCH_AND_AND_1:
5924 case BUILT_IN_SYNC_FETCH_AND_AND_2:
5925 case BUILT_IN_SYNC_FETCH_AND_AND_4:
5926 case BUILT_IN_SYNC_FETCH_AND_AND_8:
5927 case BUILT_IN_SYNC_FETCH_AND_AND_16:
5928 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
5929 target = expand_builtin_sync_operation (mode, exp, AND,
5930 false, target, ignore);
5931 if (target)
5932 return target;
5933 break;
5935 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
5936 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
5937 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
5938 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
5939 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
5940 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
5941 target = expand_builtin_sync_operation (mode, exp, XOR,
5942 false, target, ignore);
5943 if (target)
5944 return target;
5945 break;
5947 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5948 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5949 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5950 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5951 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5952 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
5953 target = expand_builtin_sync_operation (mode, exp, NOT,
5954 false, target, ignore);
5955 if (target)
5956 return target;
5957 break;
5959 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
5960 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
5961 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
5962 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
5963 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
5964 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
5965 target = expand_builtin_sync_operation (mode, exp, PLUS,
5966 true, target, ignore);
5967 if (target)
5968 return target;
5969 break;
5971 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
5972 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
5973 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
5974 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
5975 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
5976 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
5977 target = expand_builtin_sync_operation (mode, exp, MINUS,
5978 true, target, ignore);
5979 if (target)
5980 return target;
5981 break;
5983 case BUILT_IN_SYNC_OR_AND_FETCH_1:
5984 case BUILT_IN_SYNC_OR_AND_FETCH_2:
5985 case BUILT_IN_SYNC_OR_AND_FETCH_4:
5986 case BUILT_IN_SYNC_OR_AND_FETCH_8:
5987 case BUILT_IN_SYNC_OR_AND_FETCH_16:
5988 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
5989 target = expand_builtin_sync_operation (mode, exp, IOR,
5990 true, target, ignore);
5991 if (target)
5992 return target;
5993 break;
5995 case BUILT_IN_SYNC_AND_AND_FETCH_1:
5996 case BUILT_IN_SYNC_AND_AND_FETCH_2:
5997 case BUILT_IN_SYNC_AND_AND_FETCH_4:
5998 case BUILT_IN_SYNC_AND_AND_FETCH_8:
5999 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6000 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6001 target = expand_builtin_sync_operation (mode, exp, AND,
6002 true, target, ignore);
6003 if (target)
6004 return target;
6005 break;
6007 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6008 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6009 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6010 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6011 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6012 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6013 target = expand_builtin_sync_operation (mode, exp, XOR,
6014 true, target, ignore);
6015 if (target)
6016 return target;
6017 break;
6019 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6020 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6021 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6022 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6023 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6024 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6025 target = expand_builtin_sync_operation (mode, exp, NOT,
6026 true, target, ignore);
6027 if (target)
6028 return target;
6029 break;
6031 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6032 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6033 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6034 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6035 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6036 if (mode == VOIDmode)
6037 mode = TYPE_MODE (boolean_type_node);
6038 if (!target || !register_operand (target, mode))
6039 target = gen_reg_rtx (mode);
6041 mode = get_builtin_sync_mode
6042 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6043 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6044 if (target)
6045 return target;
6046 break;
6048 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6049 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6050 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6051 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6052 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6053 mode = get_builtin_sync_mode
6054 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6055 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6056 if (target)
6057 return target;
6058 break;
6060 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6061 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6062 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6063 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6064 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6065 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6066 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6067 if (target)
6068 return target;
6069 break;
6071 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6072 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6073 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6074 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6075 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6076 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6077 expand_builtin_sync_lock_release (mode, exp);
6078 return const0_rtx;
6080 case BUILT_IN_SYNC_SYNCHRONIZE:
6081 expand_builtin_sync_synchronize ();
6082 return const0_rtx;
6084 case BUILT_IN_OBJECT_SIZE:
6085 return expand_builtin_object_size (exp);
6087 case BUILT_IN_MEMCPY_CHK:
6088 case BUILT_IN_MEMPCPY_CHK:
6089 case BUILT_IN_MEMMOVE_CHK:
6090 case BUILT_IN_MEMSET_CHK:
6091 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6092 if (target)
6093 return target;
6094 break;
6096 case BUILT_IN_STRCPY_CHK:
6097 case BUILT_IN_STPCPY_CHK:
6098 case BUILT_IN_STRNCPY_CHK:
6099 case BUILT_IN_STRCAT_CHK:
6100 case BUILT_IN_STRNCAT_CHK:
6101 case BUILT_IN_SNPRINTF_CHK:
6102 case BUILT_IN_VSNPRINTF_CHK:
6103 maybe_emit_chk_warning (exp, fcode);
6104 break;
6106 case BUILT_IN_SPRINTF_CHK:
6107 case BUILT_IN_VSPRINTF_CHK:
6108 maybe_emit_sprintf_chk_warning (exp, fcode);
6109 break;
6111 case BUILT_IN_FREE:
6112 maybe_emit_free_warning (exp);
6113 break;
6115 default: /* just do library call, if unknown builtin */
6116 break;
6119 /* The switch statement above can drop through to cause the function
6120 to be called normally. */
6121 return expand_call (exp, target, ignore);
6124 /* Determine whether a tree node represents a call to a built-in
6125 function. If the tree T is a call to a built-in function with
6126 the right number of arguments of the appropriate types, return
6127 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6128 Otherwise the return value is END_BUILTINS. */
6130 enum built_in_function
6131 builtin_mathfn_code (const_tree t)
6133 const_tree fndecl, arg, parmlist;
6134 const_tree argtype, parmtype;
6135 const_call_expr_arg_iterator iter;
6137 if (TREE_CODE (t) != CALL_EXPR
6138 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6139 return END_BUILTINS;
6141 fndecl = get_callee_fndecl (t);
6142 if (fndecl == NULL_TREE
6143 || TREE_CODE (fndecl) != FUNCTION_DECL
6144 || ! DECL_BUILT_IN (fndecl)
6145 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6146 return END_BUILTINS;
6148 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6149 init_const_call_expr_arg_iterator (t, &iter);
6150 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6152 /* If a function doesn't take a variable number of arguments,
6153 the last element in the list will have type `void'. */
6154 parmtype = TREE_VALUE (parmlist);
6155 if (VOID_TYPE_P (parmtype))
6157 if (more_const_call_expr_args_p (&iter))
6158 return END_BUILTINS;
6159 return DECL_FUNCTION_CODE (fndecl);
6162 if (! more_const_call_expr_args_p (&iter))
6163 return END_BUILTINS;
6165 arg = next_const_call_expr_arg (&iter);
6166 argtype = TREE_TYPE (arg);
6168 if (SCALAR_FLOAT_TYPE_P (parmtype))
6170 if (! SCALAR_FLOAT_TYPE_P (argtype))
6171 return END_BUILTINS;
6173 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6175 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6176 return END_BUILTINS;
6178 else if (POINTER_TYPE_P (parmtype))
6180 if (! POINTER_TYPE_P (argtype))
6181 return END_BUILTINS;
6183 else if (INTEGRAL_TYPE_P (parmtype))
6185 if (! INTEGRAL_TYPE_P (argtype))
6186 return END_BUILTINS;
6188 else
6189 return END_BUILTINS;
6192 /* Variable-length argument list. */
6193 return DECL_FUNCTION_CODE (fndecl);
6196 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6197 evaluate to a constant. */
6199 static tree
6200 fold_builtin_constant_p (tree arg)
6202 /* We return 1 for a numeric type that's known to be a constant
6203 value at compile-time or for an aggregate type that's a
6204 literal constant. */
6205 STRIP_NOPS (arg);
6207 /* If we know this is a constant, return the constant one. */
6208 if (CONSTANT_CLASS_P (arg)
6209 || (TREE_CODE (arg) == CONSTRUCTOR
6210 && TREE_CONSTANT (arg)))
6211 return integer_one_node;
6212 if (TREE_CODE (arg) == ADDR_EXPR)
6214 tree op = TREE_OPERAND (arg, 0);
6215 if (TREE_CODE (op) == STRING_CST
6216 || (TREE_CODE (op) == ARRAY_REF
6217 && integer_zerop (TREE_OPERAND (op, 1))
6218 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6219 return integer_one_node;
6222 /* If this expression has side effects, show we don't know it to be a
6223 constant. Likewise if it's a pointer or aggregate type since in
6224 those cases we only want literals, since those are only optimized
6225 when generating RTL, not later.
6226 And finally, if we are compiling an initializer, not code, we
6227 need to return a definite result now; there's not going to be any
6228 more optimization done. */
6229 if (TREE_SIDE_EFFECTS (arg)
6230 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6231 || POINTER_TYPE_P (TREE_TYPE (arg))
6232 || cfun == 0
6233 || folding_initializer)
6234 return integer_zero_node;
6236 return NULL_TREE;
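/* Examples of what this folding accepts and defers (hypothetical user
   code): A and B below fold to 1 here, because the arguments are a
   numeric constant and the address of a string literal; C is left
   alone (NULL_TREE) so later RTL expansion can still resolve it, and
   it ultimately becomes 0 if the argument never turns constant.

     int
     f (int x)
     {
       int a = __builtin_constant_p (3 * 7);
       int b = __builtin_constant_p ("abc");
       int c = __builtin_constant_p (x);
       return a + b + c;
     }
*/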
6239 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6240 return it as a truthvalue. */
6242 static tree
6243 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6245 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6247 fn = built_in_decls[BUILT_IN_EXPECT];
6248 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6249 ret_type = TREE_TYPE (TREE_TYPE (fn));
6250 pred_type = TREE_VALUE (arg_types);
6251 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6253 pred = fold_convert_loc (loc, pred_type, pred);
6254 expected = fold_convert_loc (loc, expected_type, expected);
6255 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6257 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6258 build_int_cst (ret_type, 0));
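/* For a predicate P and expected value E, the helper above builds, in
   essence, the tree  __builtin_expect ((long) P, E) != 0.  The caller
   below uses it to distribute the hint over short-circuit operators,
   so that (hypothetical example)

     __builtin_expect (a && b, 1)

   is folded to roughly

     (long) ((__builtin_expect ((long) a, 1) != 0)
             && (__builtin_expect ((long) b, 1) != 0))
*/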
6261 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
6262 NULL_TREE if no simplification is possible. */
6264 static tree
6265 fold_builtin_expect (location_t loc, tree arg0, tree arg1)
6267 tree inner, fndecl;
6268 enum tree_code code;
6270 /* If this is a builtin_expect within a builtin_expect, keep the
6271 inner one. See through a comparison against a constant. It
6272 might have been added to create a truthvalue. */
6273 inner = arg0;
6274 if (COMPARISON_CLASS_P (inner)
6275 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6276 inner = TREE_OPERAND (inner, 0);
6278 if (TREE_CODE (inner) == CALL_EXPR
6279 && (fndecl = get_callee_fndecl (inner))
6280 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6281 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6282 return arg0;
6284 /* Distribute the expected value over short-circuiting operators.
6285 See through the cast from truthvalue_type_node to long. */
6286 inner = arg0;
6287 while (TREE_CODE (inner) == NOP_EXPR
6288 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
6289 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
6290 inner = TREE_OPERAND (inner, 0);
6292 code = TREE_CODE (inner);
6293 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6295 tree op0 = TREE_OPERAND (inner, 0);
6296 tree op1 = TREE_OPERAND (inner, 1);
6298 op0 = build_builtin_expect_predicate (loc, op0, arg1);
6299 op1 = build_builtin_expect_predicate (loc, op1, arg1);
6300 inner = build2 (code, TREE_TYPE (inner), op0, op1);
6302 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6305 /* If the argument isn't invariant then there's nothing else we can do. */
6306 if (!TREE_CONSTANT (arg0))
6307 return NULL_TREE;
6309 /* If we expect that a comparison against the argument will fold to
6310 a constant, return the constant. In practice, this means a true
6311 constant or the address of a non-weak symbol. */
6312 inner = arg0;
6313 STRIP_NOPS (inner);
6314 if (TREE_CODE (inner) == ADDR_EXPR)
6318 inner = TREE_OPERAND (inner, 0);
6320 while (TREE_CODE (inner) == COMPONENT_REF
6321 || TREE_CODE (inner) == ARRAY_REF);
6322 if ((TREE_CODE (inner) == VAR_DECL
6323 || TREE_CODE (inner) == FUNCTION_DECL)
6324 && DECL_WEAK (inner))
6325 return NULL_TREE;
6328 /* Otherwise, ARG0 already has the proper type for the return value. */
6329 return arg0;
6332 /* Fold a call to __builtin_classify_type with argument ARG. */
6334 static tree
6335 fold_builtin_classify_type (tree arg)
6337 if (arg == 0)
6338 return build_int_cst (integer_type_node, no_type_class);
6340 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6343 /* Fold a call to __builtin_strlen with argument ARG. */
6345 static tree
6346 fold_builtin_strlen (location_t loc, tree type, tree arg)
6348 if (!validate_arg (arg, POINTER_TYPE))
6349 return NULL_TREE;
6350 else
6352 tree len = c_strlen (arg, 0);
6354 if (len)
6355 return fold_convert_loc (loc, type, len);
6357 return NULL_TREE;
6361 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6363 static tree
6364 fold_builtin_inf (location_t loc, tree type, int warn)
6366 REAL_VALUE_TYPE real;
6368 /* __builtin_inff is intended to be usable to define INFINITY on all
6369 targets. If an infinity is not available, INFINITY expands "to a
6370 positive constant of type float that overflows at translation
6371 time", footnote "In this case, using INFINITY will violate the
6372 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6373 Thus we pedwarn to ensure this constraint violation is
6374 diagnosed. */
6375 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6376 pedwarn (loc, 0, "target format does not support infinity");
6378 real_inf (&real);
6379 return build_real (type, real);
6382 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6384 static tree
6385 fold_builtin_nan (tree arg, tree type, int quiet)
6387 REAL_VALUE_TYPE real;
6388 const char *str;
6390 if (!validate_arg (arg, POINTER_TYPE))
6391 return NULL_TREE;
6392 str = c_getstr (arg);
6393 if (!str)
6394 return NULL_TREE;
6396 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6397 return NULL_TREE;
6399 return build_real (type, real);
6402 /* Return true if the floating point expression T has an integer value.
6403 We also allow +Inf, -Inf and NaN to be considered integer values. */
6405 static bool
6406 integer_valued_real_p (tree t)
6408 switch (TREE_CODE (t))
6410 case FLOAT_EXPR:
6411 return true;
6413 case ABS_EXPR:
6414 case SAVE_EXPR:
6415 return integer_valued_real_p (TREE_OPERAND (t, 0));
6417 case COMPOUND_EXPR:
6418 case MODIFY_EXPR:
6419 case BIND_EXPR:
6420 return integer_valued_real_p (TREE_OPERAND (t, 1));
6422 case PLUS_EXPR:
6423 case MINUS_EXPR:
6424 case MULT_EXPR:
6425 case MIN_EXPR:
6426 case MAX_EXPR:
6427 return integer_valued_real_p (TREE_OPERAND (t, 0))
6428 && integer_valued_real_p (TREE_OPERAND (t, 1));
6430 case COND_EXPR:
6431 return integer_valued_real_p (TREE_OPERAND (t, 1))
6432 && integer_valued_real_p (TREE_OPERAND (t, 2));
6434 case REAL_CST:
6435 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
6437 case NOP_EXPR:
6439 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
6440 if (TREE_CODE (type) == INTEGER_TYPE)
6441 return true;
6442 if (TREE_CODE (type) == REAL_TYPE)
6443 return integer_valued_real_p (TREE_OPERAND (t, 0));
6444 break;
6447 case CALL_EXPR:
6448 switch (builtin_mathfn_code (t))
6450 CASE_FLT_FN (BUILT_IN_CEIL):
6451 CASE_FLT_FN (BUILT_IN_FLOOR):
6452 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6453 CASE_FLT_FN (BUILT_IN_RINT):
6454 CASE_FLT_FN (BUILT_IN_ROUND):
6455 CASE_FLT_FN (BUILT_IN_TRUNC):
6456 return true;
6458 CASE_FLT_FN (BUILT_IN_FMIN):
6459 CASE_FLT_FN (BUILT_IN_FMAX):
6460 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
6461 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
6463 default:
6464 break;
6466 break;
6468 default:
6469 break;
6471 return false;
6474 /* FNDECL is assumed to be a builtin where truncation can be propagated
6475 across (for instance floor((double)f) == (double)floorf (f)).
6476 Do the transformation for a call with argument ARG. */
6478 static tree
6479 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6481 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6483 if (!validate_arg (arg, REAL_TYPE))
6484 return NULL_TREE;
6486 /* Integer rounding functions are idempotent. */
6487 if (fcode == builtin_mathfn_code (arg))
6488 return arg;
6490 /* If argument is already integer valued, and we don't need to worry
6491 about setting errno, there's no need to perform rounding. */
6492 if (! flag_errno_math && integer_valued_real_p (arg))
6493 return arg;
6495 if (optimize)
6497 tree arg0 = strip_float_extensions (arg);
6498 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6499 tree newtype = TREE_TYPE (arg0);
6500 tree decl;
6502 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6503 && (decl = mathfn_built_in (newtype, fcode)))
6504 return fold_convert_loc (loc, ftype,
6505 build_call_expr_loc (loc, decl, 1,
6506 fold_convert_loc (loc,
6507 newtype,
6508 arg0)));
6510 return NULL_TREE;
6513 /* FNDECL is assumed to be a builtin which can narrow the FP type of
6514 the argument, for instance lround((double)f) -> lroundf (f).
6515 Do the transformation for a call with argument ARG. */
6517 static tree
6518 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
6520 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6522 if (!validate_arg (arg, REAL_TYPE))
6523 return NULL_TREE;
6525 /* If argument is already integer valued, and we don't need to worry
6526 about setting errno, there's no need to perform rounding. */
6527 if (! flag_errno_math && integer_valued_real_p (arg))
6528 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
6529 TREE_TYPE (TREE_TYPE (fndecl)), arg);
6531 if (optimize)
6533 tree ftype = TREE_TYPE (arg);
6534 tree arg0 = strip_float_extensions (arg);
6535 tree newtype = TREE_TYPE (arg0);
6536 tree decl;
6538 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
6539 && (decl = mathfn_built_in (newtype, fcode)))
6540 return build_call_expr_loc (loc, decl, 1,
6541 fold_convert_loc (loc, newtype, arg0));
6544 /* Canonicalize llround (x) to lround (x) on LP64 targets where
6545 sizeof (long long) == sizeof (long). */
6546 if (TYPE_PRECISION (long_long_integer_type_node)
6547 == TYPE_PRECISION (long_integer_type_node))
6549 tree newfn = NULL_TREE;
6550 switch (fcode)
6552 CASE_FLT_FN (BUILT_IN_LLCEIL):
6553 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
6554 break;
6556 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6557 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
6558 break;
6560 CASE_FLT_FN (BUILT_IN_LLROUND):
6561 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
6562 break;
6564 CASE_FLT_FN (BUILT_IN_LLRINT):
6565 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
6566 break;
6568 default:
6569 break;
6572 if (newfn)
6574 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
6575 return fold_convert_loc (loc,
6576 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
6580 return NULL_TREE;
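/* Concrete instances of the transformations above (hypothetical user
   code, with optimization enabled): the first call narrows to
   lroundf (f), and on a target where long and long long have the same
   precision the second is canonicalized to lround (x).

     #include <math.h>

     long
     demo (float f, double x)
     {
       return lround ((double) f) + (long) llround (x);
     }
*/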
6583 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
6584 return type. Return NULL_TREE if no simplification can be made. */
6586 static tree
6587 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
6589 tree res;
6591 if (!validate_arg (arg, COMPLEX_TYPE)
6592 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6593 return NULL_TREE;
6595 /* Calculate the result when the argument is a constant. */
6596 if (TREE_CODE (arg) == COMPLEX_CST
6597 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
6598 type, mpfr_hypot)))
6599 return res;
6601 if (TREE_CODE (arg) == COMPLEX_EXPR)
6603 tree real = TREE_OPERAND (arg, 0);
6604 tree imag = TREE_OPERAND (arg, 1);
6606 /* If either part is zero, cabs is fabs of the other. */
6607 if (real_zerop (real))
6608 return fold_build1_loc (loc, ABS_EXPR, type, imag);
6609 if (real_zerop (imag))
6610 return fold_build1_loc (loc, ABS_EXPR, type, real);
6612 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
6613 if (flag_unsafe_math_optimizations
6614 && operand_equal_p (real, imag, OEP_PURE_SAME))
6616 const REAL_VALUE_TYPE sqrt2_trunc
6617 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
6618 STRIP_NOPS (real);
6619 return fold_build2_loc (loc, MULT_EXPR, type,
6620 fold_build1_loc (loc, ABS_EXPR, type, real),
6621 build_real (type, sqrt2_trunc));
6625 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
6626 if (TREE_CODE (arg) == NEGATE_EXPR
6627 || TREE_CODE (arg) == CONJ_EXPR)
6628 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
6630 /* Don't do this when optimizing for size. */
6631 if (flag_unsafe_math_optimizations
6632 && optimize && optimize_function_for_speed_p (cfun))
6634 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
6636 if (sqrtfn != NULL_TREE)
6638 tree rpart, ipart, result;
6640 arg = builtin_save_expr (arg);
6642 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
6643 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
6645 rpart = builtin_save_expr (rpart);
6646 ipart = builtin_save_expr (ipart);
6648 result = fold_build2_loc (loc, PLUS_EXPR, type,
6649 fold_build2_loc (loc, MULT_EXPR, type,
6650 rpart, rpart),
6651 fold_build2_loc (loc, MULT_EXPR, type,
6652 ipart, ipart));
6654 return build_call_expr_loc (loc, sqrtfn, 1, result);
6658 return NULL_TREE;
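/* Worked instance of the folds above (hypothetical user code):

     #include <complex.h>
     #include <math.h>

     double
     magnitude_of_real (double x)
     {
       return cabs (x + 0.0 * I);
     }

   The body above typically reduces to a plain fabs of the real part;
   with -funsafe-math-optimizations, and when optimizing for speed, a
   general cabs (z) is instead expanded to
   sqrt (creal (z) * creal (z) + cimag (z) * cimag (z)).  */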
6661 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
6662 complex tree type of the result. If NEG is true, the imaginary
6663 zero is negative. */
6665 static tree
6666 build_complex_cproj (tree type, bool neg)
6668 REAL_VALUE_TYPE rinf, rzero = dconst0;
6670 real_inf (&rinf);
6671 rzero.sign = neg;
6672 return build_complex (type, build_real (TREE_TYPE (type), rinf),
6673 build_real (TREE_TYPE (type), rzero));
6676 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
6677 return type. Return NULL_TREE if no simplification can be made. */
6679 static tree
6680 fold_builtin_cproj (location_t loc, tree arg, tree type)
6682 if (!validate_arg (arg, COMPLEX_TYPE)
6683 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
6684 return NULL_TREE;
6686 /* If there are no infinities, return arg. */
6687 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
6688 return non_lvalue_loc (loc, arg);
6690 /* Calculate the result when the argument is a constant. */
6691 if (TREE_CODE (arg) == COMPLEX_CST)
6693 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
6694 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
6696 if (real_isinf (real) || real_isinf (imag))
6697 return build_complex_cproj (type, imag->sign);
6698 else
6699 return arg;
6701 else if (TREE_CODE (arg) == COMPLEX_EXPR)
6703 tree real = TREE_OPERAND (arg, 0);
6704 tree imag = TREE_OPERAND (arg, 1);
6706 STRIP_NOPS (real);
6707 STRIP_NOPS (imag);
6709 /* If the real part is inf and the imag part is known to be
6710 nonnegative, return (inf + 0i). Remember side-effects are
6711 possible in the imag part. */
6712 if (TREE_CODE (real) == REAL_CST
6713 && real_isinf (TREE_REAL_CST_PTR (real))
6714 && tree_expr_nonnegative_p (imag))
6715 return omit_one_operand_loc (loc, type,
6716 build_complex_cproj (type, false),
6717 arg);
6719 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
6720 Remember side-effects are possible in the real part. */
6721 if (TREE_CODE (imag) == REAL_CST
6722 && real_isinf (TREE_REAL_CST_PTR (imag)))
6723 return
6724 omit_one_operand_loc (loc, type,
6725 build_complex_cproj (type, TREE_REAL_CST_PTR
6726 (imag)->sign), arg);
6729 return NULL_TREE;
6732 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
6733 Return NULL_TREE if no simplification can be made. */
6735 static tree
6736 fold_builtin_sqrt (location_t loc, tree arg, tree type)
6739 enum built_in_function fcode;
6740 tree res;
6742 if (!validate_arg (arg, REAL_TYPE))
6743 return NULL_TREE;
6745 /* Calculate the result when the argument is a constant. */
6746 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
6747 return res;
6749 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
6750 fcode = builtin_mathfn_code (arg);
6751 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
6753 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6754 arg = fold_build2_loc (loc, MULT_EXPR, type,
6755 CALL_EXPR_ARG (arg, 0),
6756 build_real (type, dconsthalf));
6757 return build_call_expr_loc (loc, expfn, 1, arg);
6760 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
6761 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
6763 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6765 if (powfn)
6767 tree arg0 = CALL_EXPR_ARG (arg, 0);
6768 tree tree_root;
6769 /* The inner root was either sqrt or cbrt. */
6770 /* This was a conditional expression but it triggered a bug
6771 in Sun C 5.5. */
6772 REAL_VALUE_TYPE dconstroot;
6773 if (BUILTIN_SQRT_P (fcode))
6774 dconstroot = dconsthalf;
6775 else
6776 dconstroot = dconst_third ();
6778 /* Adjust for the outer root. */
6779 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6780 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6781 tree_root = build_real (type, dconstroot);
6782 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6786 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
6787 if (flag_unsafe_math_optimizations
6788 && (fcode == BUILT_IN_POW
6789 || fcode == BUILT_IN_POWF
6790 || fcode == BUILT_IN_POWL))
6792 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6793 tree arg0 = CALL_EXPR_ARG (arg, 0);
6794 tree arg1 = CALL_EXPR_ARG (arg, 1);
6795 tree narg1;
6796 if (!tree_expr_nonnegative_p (arg0))
6797 arg0 = build1 (ABS_EXPR, type, arg0);
6798 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
6799 build_real (type, dconsthalf));
6800 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
6803 return NULL_TREE;
6806 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
6807 Return NULL_TREE if no simplification can be made. */
6809 static tree
6810 fold_builtin_cbrt (location_t loc, tree arg, tree type)
6812 const enum built_in_function fcode = builtin_mathfn_code (arg);
6813 tree res;
6815 if (!validate_arg (arg, REAL_TYPE))
6816 return NULL_TREE;
6818 /* Calculate the result when the argument is a constant. */
6819 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
6820 return res;
6822 if (flag_unsafe_math_optimizations)
6824 /* Optimize cbrt(expN(x)) -> expN(x/3). */
6825 if (BUILTIN_EXPONENT_P (fcode))
6827 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6828 const REAL_VALUE_TYPE third_trunc =
6829 real_value_truncate (TYPE_MODE (type), dconst_third ());
6830 arg = fold_build2_loc (loc, MULT_EXPR, type,
6831 CALL_EXPR_ARG (arg, 0),
6832 build_real (type, third_trunc));
6833 return build_call_expr_loc (loc, expfn, 1, arg);
6836 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
6837 if (BUILTIN_SQRT_P (fcode))
6839 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6841 if (powfn)
6843 tree arg0 = CALL_EXPR_ARG (arg, 0);
6844 tree tree_root;
6845 REAL_VALUE_TYPE dconstroot = dconst_third ();
6847 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
6848 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6849 tree_root = build_real (type, dconstroot);
6850 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6854 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
6855 if (BUILTIN_CBRT_P (fcode))
6857 tree arg0 = CALL_EXPR_ARG (arg, 0);
6858 if (tree_expr_nonnegative_p (arg0))
6860 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6862 if (powfn)
6864 tree tree_root;
6865 REAL_VALUE_TYPE dconstroot;
6867 real_arithmetic (&dconstroot, MULT_EXPR,
6868 dconst_third_ptr (), dconst_third_ptr ());
6869 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
6870 tree_root = build_real (type, dconstroot);
6871 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
6876 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
6877 if (fcode == BUILT_IN_POW
6878 || fcode == BUILT_IN_POWF
6879 || fcode == BUILT_IN_POWL)
6881 tree arg00 = CALL_EXPR_ARG (arg, 0);
6882 tree arg01 = CALL_EXPR_ARG (arg, 1);
6883 if (tree_expr_nonnegative_p (arg00))
6885 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
6886 const REAL_VALUE_TYPE dconstroot
6887 = real_value_truncate (TYPE_MODE (type), dconst_third ());
6888 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
6889 build_real (type, dconstroot));
6890 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
6894 return NULL_TREE;
6897 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
6898 TYPE is the type of the return value. Return NULL_TREE if no
6899 simplification can be made. */
6901 static tree
6902 fold_builtin_cos (location_t loc,
6903 tree arg, tree type, tree fndecl)
6905 tree res, narg;
6907 if (!validate_arg (arg, REAL_TYPE))
6908 return NULL_TREE;
6910 /* Calculate the result when the argument is a constant. */
6911 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
6912 return res;
6914 /* Optimize cos(-x) into cos (x). */
6915 if ((narg = fold_strip_sign_ops (arg)))
6916 return build_call_expr_loc (loc, fndecl, 1, narg);
6918 return NULL_TREE;
6921 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
6922 Return NULL_TREE if no simplification can be made. */
6924 static tree
6925 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
6927 if (validate_arg (arg, REAL_TYPE))
6929 tree res, narg;
6931 /* Calculate the result when the argument is a constant. */
6932 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
6933 return res;
6935 /* Optimize cosh(-x) into cosh (x). */
6936 if ((narg = fold_strip_sign_ops (arg)))
6937 return build_call_expr_loc (loc, fndecl, 1, narg);
6940 return NULL_TREE;
6943 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
6944 argument ARG. TYPE is the type of the return value. Return
6945 NULL_TREE if no simplification can be made. */
6947 static tree
6948 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
6949 bool hyper)
6951 if (validate_arg (arg, COMPLEX_TYPE)
6952 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
6954 tree tmp;
6956 /* Calculate the result when the argument is a constant. */
6957 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
6958 return tmp;
6960 /* Optimize fn(-x) into fn(x). */
6961 if ((tmp = fold_strip_sign_ops (arg)))
6962 return build_call_expr_loc (loc, fndecl, 1, tmp);
6965 return NULL_TREE;
6968 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
6969 Return NULL_TREE if no simplification can be made. */
6971 static tree
6972 fold_builtin_tan (tree arg, tree type)
6974 enum built_in_function fcode;
6975 tree res;
6977 if (!validate_arg (arg, REAL_TYPE))
6978 return NULL_TREE;
6980 /* Calculate the result when the argument is a constant. */
6981 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
6982 return res;
6984 /* Optimize tan(atan(x)) = x. */
6985 fcode = builtin_mathfn_code (arg);
6986 if (flag_unsafe_math_optimizations
6987 && (fcode == BUILT_IN_ATAN
6988 || fcode == BUILT_IN_ATANF
6989 || fcode == BUILT_IN_ATANL))
6990 return CALL_EXPR_ARG (arg, 0);
6992 return NULL_TREE;
6995 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
6996 NULL_TREE if no simplification can be made. */
6998 static tree
6999 fold_builtin_sincos (location_t loc,
7000 tree arg0, tree arg1, tree arg2)
7002 tree type;
7003 tree res, fn, call;
7005 if (!validate_arg (arg0, REAL_TYPE)
7006 || !validate_arg (arg1, POINTER_TYPE)
7007 || !validate_arg (arg2, POINTER_TYPE))
7008 return NULL_TREE;
7010 type = TREE_TYPE (arg0);
7012 /* Calculate the result when the argument is a constant. */
7013 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7014 return res;
7016 /* Canonicalize sincos to cexpi. */
7017 if (!TARGET_C99_FUNCTIONS)
7018 return NULL_TREE;
7019 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7020 if (!fn)
7021 return NULL_TREE;
7023 call = build_call_expr_loc (loc, fn, 1, arg0);
7024 call = builtin_save_expr (call);
7026 return build2 (COMPOUND_EXPR, void_type_node,
7027 build2 (MODIFY_EXPR, void_type_node,
7028 build_fold_indirect_ref_loc (loc, arg1),
7029 build1 (IMAGPART_EXPR, type, call)),
7030 build2 (MODIFY_EXPR, void_type_node,
7031 build_fold_indirect_ref_loc (loc, arg2),
7032 build1 (REALPART_EXPR, type, call)));
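/* On targets with the C99 math library, the canonicalization above
   rewrites (hypothetical user code)

     sincos (x, &s, &c);

   into the equivalent of

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   where cexpi is GCC's internal cos + i*sin function, so a single
   complex-exponential computation feeds both results.  */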
7035 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7036 NULL_TREE if no simplification can be made. */
7038 static tree
7039 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7041 tree rtype;
7042 tree realp, imagp, ifn;
7043 tree res;
7045 if (!validate_arg (arg0, COMPLEX_TYPE)
7046 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7047 return NULL_TREE;
7049 /* Calculate the result when the argument is a constant. */
7050 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7051 return res;
7053 rtype = TREE_TYPE (TREE_TYPE (arg0));
7055 /* If we can figure out the real part of arg0 and it is constant zero,
7056 fold to cexpi. */
7057 if (!TARGET_C99_FUNCTIONS)
7058 return NULL_TREE;
7059 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7060 if (!ifn)
7061 return NULL_TREE;
7063 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7064 && real_zerop (realp))
7066 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7067 return build_call_expr_loc (loc, ifn, 1, narg);
7070 /* If we can easily decompose the real and imaginary parts, split cexp
7071 into exp (r) * cexpi (i). */
7072 if (flag_unsafe_math_optimizations
7073 && realp)
7075 tree rfn, rcall, icall;
7077 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7078 if (!rfn)
7079 return NULL_TREE;
7081 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7082 if (!imagp)
7083 return NULL_TREE;
7085 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7086 icall = builtin_save_expr (icall);
7087 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7088 rcall = builtin_save_expr (rcall);
7089 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7090 fold_build2_loc (loc, MULT_EXPR, rtype,
7091 rcall,
7092 fold_build1_loc (loc, REALPART_EXPR,
7093 rtype, icall)),
7094 fold_build2_loc (loc, MULT_EXPR, rtype,
7095 rcall,
7096 fold_build1_loc (loc, IMAGPART_EXPR,
7097 rtype, icall)));
7100 return NULL_TREE;
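/* Source-level picture of the two folds above (hypothetical
   expressions, on targets with the C99 math library): when the real
   part of the argument is known to be zero, cexp (0 + y * I) becomes
   cexpi (y); with -funsafe-math-optimizations a general
   cexp (x + y * I) is split into exp (x) * cexpi (y), i.e. real part
   exp (x) * cos (y) and imaginary part exp (x) * sin (y).  */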
7103 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7104 Return NULL_TREE if no simplification can be made. */
7106 static tree
7107 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7109 if (!validate_arg (arg, REAL_TYPE))
7110 return NULL_TREE;
7112 /* Optimize trunc of constant value. */
7113 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7115 REAL_VALUE_TYPE r, x;
7116 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7118 x = TREE_REAL_CST (arg);
7119 real_trunc (&r, TYPE_MODE (type), &x);
7120 return build_real (type, r);
7123 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7126 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7127 Return NULL_TREE if no simplification can be made. */
7129 static tree
7130 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7132 if (!validate_arg (arg, REAL_TYPE))
7133 return NULL_TREE;
7135 /* Optimize floor of constant value. */
7136 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7138 REAL_VALUE_TYPE x;
7140 x = TREE_REAL_CST (arg);
7141 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7143 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7144 REAL_VALUE_TYPE r;
7146 real_floor (&r, TYPE_MODE (type), &x);
7147 return build_real (type, r);
7151 /* Fold floor (x) where x is nonnegative to trunc (x). */
7152 if (tree_expr_nonnegative_p (arg))
7154 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7155 if (truncfn)
7156 return build_call_expr_loc (loc, truncfn, 1, arg);
7159 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7162 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7163 Return NULL_TREE if no simplification can be made. */
7165 static tree
7166 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7168 if (!validate_arg (arg, REAL_TYPE))
7169 return NULL_TREE;
7171 /* Optimize ceil of constant value. */
7172 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7174 REAL_VALUE_TYPE x;
7176 x = TREE_REAL_CST (arg);
7177 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7179 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7180 REAL_VALUE_TYPE r;
7182 real_ceil (&r, TYPE_MODE (type), &x);
7183 return build_real (type, r);
7187 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7190 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7191 Return NULL_TREE if no simplification can be made. */
7193 static tree
7194 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7196 if (!validate_arg (arg, REAL_TYPE))
7197 return NULL_TREE;
7199 /* Optimize round of constant value. */
7200 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7202 REAL_VALUE_TYPE x;
7204 x = TREE_REAL_CST (arg);
7205 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7207 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7208 REAL_VALUE_TYPE r;
7210 real_round (&r, TYPE_MODE (type), &x);
7211 return build_real (type, r);
7215 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7218 /* Fold function call to builtin lround, lroundf or lroundl (or the
7219 corresponding long long versions) and other rounding functions. ARG
7220 is the argument to the call. Return NULL_TREE if no simplification
7221 can be made. */
7223 static tree
7224 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7226 if (!validate_arg (arg, REAL_TYPE))
7227 return NULL_TREE;
7229 /* Optimize lround of constant value. */
7230 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7232 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7234 if (real_isfinite (&x))
7236 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7237 tree ftype = TREE_TYPE (arg);
7238 double_int val;
7239 REAL_VALUE_TYPE r;
7241 switch (DECL_FUNCTION_CODE (fndecl))
7243 CASE_FLT_FN (BUILT_IN_LFLOOR):
7244 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7245 real_floor (&r, TYPE_MODE (ftype), &x);
7246 break;
7248 CASE_FLT_FN (BUILT_IN_LCEIL):
7249 CASE_FLT_FN (BUILT_IN_LLCEIL):
7250 real_ceil (&r, TYPE_MODE (ftype), &x);
7251 break;
7253 CASE_FLT_FN (BUILT_IN_LROUND):
7254 CASE_FLT_FN (BUILT_IN_LLROUND):
7255 real_round (&r, TYPE_MODE (ftype), &x);
7256 break;
7258 default:
7259 gcc_unreachable ();
7262 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
7263 if (double_int_fits_to_tree_p (itype, val))
7264 return double_int_to_tree (itype, val);
7268 switch (DECL_FUNCTION_CODE (fndecl))
7270 CASE_FLT_FN (BUILT_IN_LFLOOR):
7271 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7272 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7273 if (tree_expr_nonnegative_p (arg))
7274 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7275 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7276 break;
7277 default:;
7280 return fold_fixed_mathfn (loc, fndecl, arg);
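/* Worked examples of the integer rounding folds above: lfloor (3.7)
   folds to 3, lceil (3.2) to 4 and lround (2.5) to 3 at compile time,
   while lfloor (x) for provably nonnegative x is lowered to a plain
   FIX_TRUNC_EXPR conversion. */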
7283 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7284 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7285 the argument to the call. Return NULL_TREE if no simplification can
7286 be made. */
7288 static tree
7289 fold_builtin_bitop (tree fndecl, tree arg)
7291 if (!validate_arg (arg, INTEGER_TYPE))
7292 return NULL_TREE;
7294 /* Optimize for constant argument. */
7295 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7297 HOST_WIDE_INT hi, width, result;
7298 unsigned HOST_WIDE_INT lo;
7299 tree type;
7301 type = TREE_TYPE (arg);
7302 width = TYPE_PRECISION (type);
7303 lo = TREE_INT_CST_LOW (arg);
7305 /* Clear all the bits that are beyond the type's precision. */
7306 if (width > HOST_BITS_PER_WIDE_INT)
7308 hi = TREE_INT_CST_HIGH (arg);
7309 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7310 hi &= ~((unsigned HOST_WIDE_INT) (-1)
7311 << (width - HOST_BITS_PER_WIDE_INT));
7313 else
7315 hi = 0;
7316 if (width < HOST_BITS_PER_WIDE_INT)
7317 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7320 switch (DECL_FUNCTION_CODE (fndecl))
7322 CASE_INT_FN (BUILT_IN_FFS):
7323 if (lo != 0)
7324 result = ffs_hwi (lo);
7325 else if (hi != 0)
7326 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
7327 else
7328 result = 0;
7329 break;
7331 CASE_INT_FN (BUILT_IN_CLZ):
7332 if (hi != 0)
7333 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7334 else if (lo != 0)
7335 result = width - floor_log2 (lo) - 1;
7336 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7337 result = width;
7338 break;
7340 CASE_INT_FN (BUILT_IN_CTZ):
7341 if (lo != 0)
7342 result = ctz_hwi (lo);
7343 else if (hi != 0)
7344 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
7345 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7346 result = width;
7347 break;
7349 CASE_INT_FN (BUILT_IN_CLRSB):
7350 if (width > HOST_BITS_PER_WIDE_INT
7351 && (hi & ((unsigned HOST_WIDE_INT) 1
7352 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
7354 hi = ~hi & ~((unsigned HOST_WIDE_INT) (-1)
7355 << (width - HOST_BITS_PER_WIDE_INT - 1));
7356 lo = ~lo;
7358 else if (width <= HOST_BITS_PER_WIDE_INT
7359 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
7360 lo = ~lo & ~((unsigned HOST_WIDE_INT) (-1) << (width - 1));
7361 if (hi != 0)
7362 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
7363 else if (lo != 0)
7364 result = width - floor_log2 (lo) - 2;
7365 else
7366 result = width - 1;
7367 break;
7369 CASE_INT_FN (BUILT_IN_POPCOUNT):
7370 result = 0;
7371 while (lo)
7372 result++, lo &= lo - 1;
7373 while (hi)
7374 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7375 break;
7377 CASE_INT_FN (BUILT_IN_PARITY):
7378 result = 0;
7379 while (lo)
7380 result++, lo &= lo - 1;
7381 while (hi)
7382 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
7383 result &= 1;
7384 break;
7386 default:
7387 gcc_unreachable ();
7390 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7393 return NULL_TREE;
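/* Worked examples of the bit-operation folds above, assuming a 32-bit
   int: ffs (0x18) folds to 4, clz (1) to 31, ctz (0x10) to 4,
   popcount (0xf0) to 4 and parity (7) to 1; clz (0) and ctz (0) fold to
   the target-defined value at zero, or to the bit width if none is
   defined. */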
7396 /* Fold function call to builtin_bswap and the long and long long
7397 variants. Return NULL_TREE if no simplification can be made. */
7398 static tree
7399 fold_builtin_bswap (tree fndecl, tree arg)
7401 if (! validate_arg (arg, INTEGER_TYPE))
7402 return NULL_TREE;
7404 /* Optimize constant value. */
7405 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7407 HOST_WIDE_INT hi, width, r_hi = 0;
7408 unsigned HOST_WIDE_INT lo, r_lo = 0;
7409 tree type;
7411 type = TREE_TYPE (arg);
7412 width = TYPE_PRECISION (type);
7413 lo = TREE_INT_CST_LOW (arg);
7414 hi = TREE_INT_CST_HIGH (arg);
7416 switch (DECL_FUNCTION_CODE (fndecl))
7418 case BUILT_IN_BSWAP32:
7419 case BUILT_IN_BSWAP64:
7421 int s;
7423 for (s = 0; s < width; s += 8)
7425 int d = width - s - 8;
7426 unsigned HOST_WIDE_INT byte;
7428 if (s < HOST_BITS_PER_WIDE_INT)
7429 byte = (lo >> s) & 0xff;
7430 else
7431 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7433 if (d < HOST_BITS_PER_WIDE_INT)
7434 r_lo |= byte << d;
7435 else
7436 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7440 break;
7442 default:
7443 gcc_unreachable ();
7446 if (width < HOST_BITS_PER_WIDE_INT)
7447 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7448 else
7449 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7452 return NULL_TREE;
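/* Worked example of the bswap fold above: __builtin_bswap32 (0x12345678)
   folds to 0x78563412 at compile time; __builtin_bswap64 is handled the
   same way, one byte at a time. */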
7455 /* A subroutine of fold_builtin to fold the various logarithmic
7456 functions. Return NULL_TREE if no simplification can be made.
7457 FUNC is the corresponding MPFR logarithm function. */
7459 static tree
7460 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7461 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7463 if (validate_arg (arg, REAL_TYPE))
7465 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7466 tree res;
7467 const enum built_in_function fcode = builtin_mathfn_code (arg);
7469 /* Calculate the result when the argument is a constant. */
7470 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7471 return res;
7473 /* Special case, optimize logN(expN(x)) = x. */
7474 if (flag_unsafe_math_optimizations
7475 && ((func == mpfr_log
7476 && (fcode == BUILT_IN_EXP
7477 || fcode == BUILT_IN_EXPF
7478 || fcode == BUILT_IN_EXPL))
7479 || (func == mpfr_log2
7480 && (fcode == BUILT_IN_EXP2
7481 || fcode == BUILT_IN_EXP2F
7482 || fcode == BUILT_IN_EXP2L))
7483 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7484 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7486 /* Optimize logN(func()) for various exponential functions. We
7487 want to determine the value "x" and the power "exponent" in
7488 order to transform logN(x**exponent) into exponent*logN(x). */
7489 if (flag_unsafe_math_optimizations)
7491 tree exponent = 0, x = 0;
7493 switch (fcode)
7495 CASE_FLT_FN (BUILT_IN_EXP):
7496 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
7497 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7498 dconst_e ()));
7499 exponent = CALL_EXPR_ARG (arg, 0);
7500 break;
7501 CASE_FLT_FN (BUILT_IN_EXP2):
7502 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
7503 x = build_real (type, dconst2);
7504 exponent = CALL_EXPR_ARG (arg, 0);
7505 break;
7506 CASE_FLT_FN (BUILT_IN_EXP10):
7507 CASE_FLT_FN (BUILT_IN_POW10):
7508 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
7510 REAL_VALUE_TYPE dconst10;
7511 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7512 x = build_real (type, dconst10);
7514 exponent = CALL_EXPR_ARG (arg, 0);
7515 break;
7516 CASE_FLT_FN (BUILT_IN_SQRT):
7517 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
7518 x = CALL_EXPR_ARG (arg, 0);
7519 exponent = build_real (type, dconsthalf);
7520 break;
7521 CASE_FLT_FN (BUILT_IN_CBRT):
7522 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
7523 x = CALL_EXPR_ARG (arg, 0);
7524 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7525 dconst_third ()));
7526 break;
7527 CASE_FLT_FN (BUILT_IN_POW):
7528 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
7529 x = CALL_EXPR_ARG (arg, 0);
7530 exponent = CALL_EXPR_ARG (arg, 1);
7531 break;
7532 default:
7533 break;
7536 /* Now perform the optimization. */
7537 if (x && exponent)
7539 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7540 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7545 return NULL_TREE;
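/* Worked examples of the logarithm folds above, all guarded by
   -funsafe-math-optimizations: log (exp (x)) becomes x,
   log2 (exp2 (x)) becomes x, log (sqrt (x)) becomes 0.5*log (x) and
   log (pow (x, y)) becomes y*log (x). */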
7548 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7549 NULL_TREE if no simplification can be made. */
7551 static tree
7552 fold_builtin_hypot (location_t loc, tree fndecl,
7553 tree arg0, tree arg1, tree type)
7555 tree res, narg0, narg1;
7557 if (!validate_arg (arg0, REAL_TYPE)
7558 || !validate_arg (arg1, REAL_TYPE))
7559 return NULL_TREE;
7561 /* Calculate the result when the argument is a constant. */
7562 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7563 return res;
7565 /* If either argument to hypot has a negate or abs, strip that off.
7566 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7567 narg0 = fold_strip_sign_ops (arg0);
7568 narg1 = fold_strip_sign_ops (arg1);
7569 if (narg0 || narg1)
7571 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7572 narg1 ? narg1 : arg1);
7575 /* If either argument is zero, hypot is fabs of the other. */
7576 if (real_zerop (arg0))
7577 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7578 else if (real_zerop (arg1))
7579 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7581 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7582 if (flag_unsafe_math_optimizations
7583 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7585 const REAL_VALUE_TYPE sqrt2_trunc
7586 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7587 return fold_build2_loc (loc, MULT_EXPR, type,
7588 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7589 build_real (type, sqrt2_trunc));
7592 return NULL_TREE;
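/* Worked examples of the hypot folds above: hypot (-x, fabs (y)) becomes
   hypot (x, y), hypot (x, 0.0) becomes fabs (x), and with
   -funsafe-math-optimizations hypot (x, x) becomes fabs (x) * sqrt (2). */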
7596 /* Fold a builtin function call to pow, powf, or powl. Return
7597 NULL_TREE if no simplification can be made. */
7598 static tree
7599 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7601 tree res;
7603 if (!validate_arg (arg0, REAL_TYPE)
7604 || !validate_arg (arg1, REAL_TYPE))
7605 return NULL_TREE;
7607 /* Calculate the result when the argument is a constant. */
7608 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7609 return res;
7611 /* Optimize pow(1.0,y) = 1.0. */
7612 if (real_onep (arg0))
7613 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7615 if (TREE_CODE (arg1) == REAL_CST
7616 && !TREE_OVERFLOW (arg1))
7618 REAL_VALUE_TYPE cint;
7619 REAL_VALUE_TYPE c;
7620 HOST_WIDE_INT n;
7622 c = TREE_REAL_CST (arg1);
7624 /* Optimize pow(x,0.0) = 1.0. */
7625 if (REAL_VALUES_EQUAL (c, dconst0))
7626 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7627 arg0);
7629 /* Optimize pow(x,1.0) = x. */
7630 if (REAL_VALUES_EQUAL (c, dconst1))
7631 return arg0;
7633 /* Optimize pow(x,-1.0) = 1.0/x. */
7634 if (REAL_VALUES_EQUAL (c, dconstm1))
7635 return fold_build2_loc (loc, RDIV_EXPR, type,
7636 build_real (type, dconst1), arg0);
7638 /* Optimize pow(x,0.5) = sqrt(x). */
7639 if (flag_unsafe_math_optimizations
7640 && REAL_VALUES_EQUAL (c, dconsthalf))
7642 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7644 if (sqrtfn != NULL_TREE)
7645 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7648 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7649 if (flag_unsafe_math_optimizations)
7651 const REAL_VALUE_TYPE dconstroot
7652 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7654 if (REAL_VALUES_EQUAL (c, dconstroot))
7656 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7657 if (cbrtfn != NULL_TREE)
7658 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7662 /* Check for an integer exponent. */
7663 n = real_to_integer (&c);
7664 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7665 if (real_identical (&c, &cint))
7667 /* Attempt to evaluate pow at compile-time, unless this should
7668 raise an exception. */
7669 if (TREE_CODE (arg0) == REAL_CST
7670 && !TREE_OVERFLOW (arg0)
7671 && (n > 0
7672 || (!flag_trapping_math && !flag_errno_math)
7673 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7675 REAL_VALUE_TYPE x;
7676 bool inexact;
7678 x = TREE_REAL_CST (arg0);
7679 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7680 if (flag_unsafe_math_optimizations || !inexact)
7681 return build_real (type, x);
7684 /* Strip sign ops from even integer powers. */
7685 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7687 tree narg0 = fold_strip_sign_ops (arg0);
7688 if (narg0)
7689 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7694 if (flag_unsafe_math_optimizations)
7696 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7698 /* Optimize pow(expN(x),y) = expN(x*y). */
7699 if (BUILTIN_EXPONENT_P (fcode))
7701 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7702 tree arg = CALL_EXPR_ARG (arg0, 0);
7703 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7704 return build_call_expr_loc (loc, expfn, 1, arg);
7707 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7708 if (BUILTIN_SQRT_P (fcode))
7710 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7711 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7712 build_real (type, dconsthalf));
7713 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7716 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7717 if (BUILTIN_CBRT_P (fcode))
7719 tree arg = CALL_EXPR_ARG (arg0, 0);
7720 if (tree_expr_nonnegative_p (arg))
7722 const REAL_VALUE_TYPE dconstroot
7723 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7724 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7725 build_real (type, dconstroot));
7726 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7730 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7731 if (fcode == BUILT_IN_POW
7732 || fcode == BUILT_IN_POWF
7733 || fcode == BUILT_IN_POWL)
7735 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7736 if (tree_expr_nonnegative_p (arg00))
7738 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7739 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7740 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7745 return NULL_TREE;
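/* Worked examples of the pow folds above: pow (2.0, 10.0) folds to
   1024.0 at compile time, pow (x, -1.0) becomes 1.0 / x, and under
   -funsafe-math-optimizations pow (x, 0.5) becomes sqrt (x) while
   pow (sqrt (x), y) becomes pow (x, y * 0.5). */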
7748 /* Fold a builtin function call to powi, powif, or powil with arguments
7749 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
7750 static tree
7751 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7752 tree arg0, tree arg1, tree type)
7754 if (!validate_arg (arg0, REAL_TYPE)
7755 || !validate_arg (arg1, INTEGER_TYPE))
7756 return NULL_TREE;
7758 /* Optimize pow(1.0,y) = 1.0. */
7759 if (real_onep (arg0))
7760 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7762 if (host_integerp (arg1, 0))
7764 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7766 /* Evaluate powi at compile-time. */
7767 if (TREE_CODE (arg0) == REAL_CST
7768 && !TREE_OVERFLOW (arg0))
7770 REAL_VALUE_TYPE x;
7771 x = TREE_REAL_CST (arg0);
7772 real_powi (&x, TYPE_MODE (type), &x, c);
7773 return build_real (type, x);
7776 /* Optimize pow(x,0) = 1.0. */
7777 if (c == 0)
7778 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7779 arg0);
7781 /* Optimize pow(x,1) = x. */
7782 if (c == 1)
7783 return arg0;
7785 /* Optimize pow(x,-1) = 1.0/x. */
7786 if (c == -1)
7787 return fold_build2_loc (loc, RDIV_EXPR, type,
7788 build_real (type, dconst1), arg0);
7791 return NULL_TREE;
7794 /* A subroutine of fold_builtin to fold the various exponent
7795 functions. Return NULL_TREE if no simplification can be made.
7796 FUNC is the corresponding MPFR exponent function. */
7798 static tree
7799 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7800 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7802 if (validate_arg (arg, REAL_TYPE))
7804 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7805 tree res;
7807 /* Calculate the result when the argument is a constant. */
7808 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
7809 return res;
7811 /* Optimize expN(logN(x)) = x. */
7812 if (flag_unsafe_math_optimizations)
7814 const enum built_in_function fcode = builtin_mathfn_code (arg);
7816 if ((func == mpfr_exp
7817 && (fcode == BUILT_IN_LOG
7818 || fcode == BUILT_IN_LOGF
7819 || fcode == BUILT_IN_LOGL))
7820 || (func == mpfr_exp2
7821 && (fcode == BUILT_IN_LOG2
7822 || fcode == BUILT_IN_LOG2F
7823 || fcode == BUILT_IN_LOG2L))
7824 || (func == mpfr_exp10
7825 && (fcode == BUILT_IN_LOG10
7826 || fcode == BUILT_IN_LOG10F
7827 || fcode == BUILT_IN_LOG10L)))
7828 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7832 return NULL_TREE;
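/* Worked example of the exponent folds above: with
   -funsafe-math-optimizations exp (log (x)) becomes x, and likewise
   exp2 (log2 (x)) and exp10 (log10 (x)). */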
7835 /* Return true if VAR is a VAR_DECL or a component thereof. */
7837 static bool
7838 var_decl_component_p (tree var)
7840 tree inner = var;
7841 while (handled_component_p (inner))
7842 inner = TREE_OPERAND (inner, 0);
7843 return SSA_VAR_P (inner);
7846 /* Fold function call to builtin memset. Return
7847 NULL_TREE if no simplification can be made. */
7849 static tree
7850 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
7851 tree type, bool ignore)
7853 tree var, ret, etype;
7854 unsigned HOST_WIDE_INT length, cval;
7856 if (! validate_arg (dest, POINTER_TYPE)
7857 || ! validate_arg (c, INTEGER_TYPE)
7858 || ! validate_arg (len, INTEGER_TYPE))
7859 return NULL_TREE;
7861 if (! host_integerp (len, 1))
7862 return NULL_TREE;
7864 /* If the LEN parameter is zero, return DEST. */
7865 if (integer_zerop (len))
7866 return omit_one_operand_loc (loc, type, dest, c);
7868 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
7869 return NULL_TREE;
7871 var = dest;
7872 STRIP_NOPS (var);
7873 if (TREE_CODE (var) != ADDR_EXPR)
7874 return NULL_TREE;
7876 var = TREE_OPERAND (var, 0);
7877 if (TREE_THIS_VOLATILE (var))
7878 return NULL_TREE;
7880 etype = TREE_TYPE (var);
7881 if (TREE_CODE (etype) == ARRAY_TYPE)
7882 etype = TREE_TYPE (etype);
7884 if (!INTEGRAL_TYPE_P (etype)
7885 && !POINTER_TYPE_P (etype))
7886 return NULL_TREE;
7888 if (! var_decl_component_p (var))
7889 return NULL_TREE;
7891 length = tree_low_cst (len, 1);
7892 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
7893 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
7894 < length)
7895 return NULL_TREE;
7897 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
7898 return NULL_TREE;
7900 if (integer_zerop (c))
7901 cval = 0;
7902 else
7904 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
7905 return NULL_TREE;
7907 cval = TREE_INT_CST_LOW (c);
7908 cval &= 0xff;
7909 cval |= cval << 8;
7910 cval |= cval << 16;
7911 cval |= (cval << 31) << 1;
7914 ret = build_int_cst_type (etype, cval);
7915 var = build_fold_indirect_ref_loc (loc,
7916 fold_convert_loc (loc,
7917 build_pointer_type (etype),
7918 dest));
7919 ret = build2 (MODIFY_EXPR, etype, var, ret);
7920 if (ignore)
7921 return ret;
7923 return omit_one_operand_loc (loc, type, dest, ret);
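/* Worked example of the memset fold above: given "int i;", the call
   memset (&i, 0, sizeof (int)) can be rewritten as the plain store
   i = 0, provided the destination is a non-volatile variable with
   sufficient alignment. */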
7926 /* Fold function call to builtin bzero. Return
7927 NULL_TREE if no simplification can be made. */
7929 static tree
7930 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
7932 if (! validate_arg (dest, POINTER_TYPE)
7933 || ! validate_arg (size, INTEGER_TYPE))
7934 return NULL_TREE;
7936 if (!ignore)
7937 return NULL_TREE;
7939 /* New argument list transforming bzero(ptr x, int y) to
7940 memset(ptr x, int 0, size_t y). This is done this way
7941 so that if it isn't expanded inline, we fall back to
7942 calling bzero instead of memset. */
7944 return fold_builtin_memset (loc, dest, integer_zero_node,
7945 fold_convert_loc (loc, sizetype, size),
7946 void_type_node, ignore);
7949 /* Fold function call to builtin mem{{,p}cpy,move}. Return
7950 NULL_TREE if no simplification can be made.
7951 If ENDP is 0, return DEST (like memcpy).
7952 If ENDP is 1, return DEST+LEN (like mempcpy).
7953 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
7954 If ENDP is 3, return DEST; additionally, *SRC and *DEST may overlap
7955 (memmove). */
7957 static tree
7958 fold_builtin_memory_op (location_t loc, tree dest, tree src,
7959 tree len, tree type, bool ignore, int endp)
7961 tree destvar, srcvar, expr;
7963 if (! validate_arg (dest, POINTER_TYPE)
7964 || ! validate_arg (src, POINTER_TYPE)
7965 || ! validate_arg (len, INTEGER_TYPE))
7966 return NULL_TREE;
7968 /* If the LEN parameter is zero, return DEST. */
7969 if (integer_zerop (len))
7970 return omit_one_operand_loc (loc, type, dest, src);
7972 /* If SRC and DEST are the same (and not volatile), return
7973 DEST{,+LEN,+LEN-1}. */
7974 if (operand_equal_p (src, dest, 0))
7975 expr = len;
7976 else
7978 tree srctype, desttype;
7979 unsigned int src_align, dest_align;
7980 tree off0;
7982 if (endp == 3)
7984 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
7985 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
7987 /* Both DEST and SRC must be pointer types.
7988 ??? This is what old code did. Is the testing for pointer types
7989 really mandatory?
7991 If either SRC is readonly or length is 1, we can use memcpy. */
7992 if (!dest_align || !src_align)
7993 return NULL_TREE;
7994 if (readonly_data_expr (src)
7995 || (host_integerp (len, 1)
7996 && (MIN (src_align, dest_align) / BITS_PER_UNIT
7997 >= (unsigned HOST_WIDE_INT) tree_low_cst (len, 1))))
7999 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8000 if (!fn)
8001 return NULL_TREE;
8002 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8005 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8006 if (TREE_CODE (src) == ADDR_EXPR
8007 && TREE_CODE (dest) == ADDR_EXPR)
8009 tree src_base, dest_base, fn;
8010 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8011 HOST_WIDE_INT size = -1;
8012 HOST_WIDE_INT maxsize = -1;
8014 srcvar = TREE_OPERAND (src, 0);
8015 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8016 &size, &maxsize);
8017 destvar = TREE_OPERAND (dest, 0);
8018 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8019 &size, &maxsize);
8020 if (host_integerp (len, 1))
8021 maxsize = tree_low_cst (len, 1);
8022 else
8023 maxsize = -1;
8024 src_offset /= BITS_PER_UNIT;
8025 dest_offset /= BITS_PER_UNIT;
8026 if (SSA_VAR_P (src_base)
8027 && SSA_VAR_P (dest_base))
8029 if (operand_equal_p (src_base, dest_base, 0)
8030 && ranges_overlap_p (src_offset, maxsize,
8031 dest_offset, maxsize))
8032 return NULL_TREE;
8034 else if (TREE_CODE (src_base) == MEM_REF
8035 && TREE_CODE (dest_base) == MEM_REF)
8037 double_int off;
8038 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8039 TREE_OPERAND (dest_base, 0), 0))
8040 return NULL_TREE;
8041 off = double_int_add (mem_ref_offset (src_base),
8042 shwi_to_double_int (src_offset));
8043 if (!double_int_fits_in_shwi_p (off))
8044 return NULL_TREE;
8045 src_offset = off.low;
8046 off = double_int_add (mem_ref_offset (dest_base),
8047 shwi_to_double_int (dest_offset));
8048 if (!double_int_fits_in_shwi_p (off))
8049 return NULL_TREE;
8050 dest_offset = off.low;
8051 if (ranges_overlap_p (src_offset, maxsize,
8052 dest_offset, maxsize))
8053 return NULL_TREE;
8055 else
8056 return NULL_TREE;
8058 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8059 if (!fn)
8060 return NULL_TREE;
8061 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8064 /* If the destination and source do not alias, optimize into
8065 memcpy as well. */
8066 if ((is_gimple_min_invariant (dest)
8067 || TREE_CODE (dest) == SSA_NAME)
8068 && (is_gimple_min_invariant (src)
8069 || TREE_CODE (src) == SSA_NAME))
8071 ao_ref destr, srcr;
8072 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8073 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8074 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8076 tree fn;
8077 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8078 if (!fn)
8079 return NULL_TREE;
8080 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8084 return NULL_TREE;
8087 if (!host_integerp (len, 0))
8088 return NULL_TREE;
8089 /* FIXME:
8090 This logic loses for arguments like (type *)malloc (sizeof (type)),
8091 since we strip the casts off the VOID return value from malloc.
8092 Perhaps we ought to inherit the type from the non-VOID argument here? */
8093 STRIP_NOPS (src);
8094 STRIP_NOPS (dest);
8095 if (!POINTER_TYPE_P (TREE_TYPE (src))
8096 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8097 return NULL_TREE;
8098 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8099 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8101 tree tem = TREE_OPERAND (src, 0);
8102 STRIP_NOPS (tem);
8103 if (tem != TREE_OPERAND (src, 0))
8104 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8106 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8108 tree tem = TREE_OPERAND (dest, 0);
8109 STRIP_NOPS (tem);
8110 if (tem != TREE_OPERAND (dest, 0))
8111 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8113 srctype = TREE_TYPE (TREE_TYPE (src));
8114 if (TREE_CODE (srctype) == ARRAY_TYPE
8115 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8117 srctype = TREE_TYPE (srctype);
8118 STRIP_NOPS (src);
8119 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8121 desttype = TREE_TYPE (TREE_TYPE (dest));
8122 if (TREE_CODE (desttype) == ARRAY_TYPE
8123 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8125 desttype = TREE_TYPE (desttype);
8126 STRIP_NOPS (dest);
8127 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8129 if (TREE_ADDRESSABLE (srctype)
8130 || TREE_ADDRESSABLE (desttype))
8131 return NULL_TREE;
8133 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8134 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8135 if (dest_align < TYPE_ALIGN (desttype)
8136 || src_align < TYPE_ALIGN (srctype))
8137 return NULL_TREE;
8139 if (!ignore)
8140 dest = builtin_save_expr (dest);
8142 /* Build accesses at offset zero with a ref-all character type. */
8143 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8144 ptr_mode, true), 0);
8146 destvar = dest;
8147 STRIP_NOPS (destvar);
8148 if (TREE_CODE (destvar) == ADDR_EXPR
8149 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8150 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8151 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8152 else
8153 destvar = NULL_TREE;
8155 srcvar = src;
8156 STRIP_NOPS (srcvar);
8157 if (TREE_CODE (srcvar) == ADDR_EXPR
8158 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8159 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8161 if (!destvar
8162 || src_align >= TYPE_ALIGN (desttype))
8163 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8164 srcvar, off0);
8165 else if (!STRICT_ALIGNMENT)
8167 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8168 src_align);
8169 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8171 else
8172 srcvar = NULL_TREE;
8174 else
8175 srcvar = NULL_TREE;
8177 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8178 return NULL_TREE;
8180 if (srcvar == NULL_TREE)
8182 STRIP_NOPS (src);
8183 if (src_align >= TYPE_ALIGN (desttype))
8184 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
8185 else
8187 if (STRICT_ALIGNMENT)
8188 return NULL_TREE;
8189 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8190 src_align);
8191 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
8194 else if (destvar == NULL_TREE)
8196 STRIP_NOPS (dest);
8197 if (dest_align >= TYPE_ALIGN (srctype))
8198 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
8199 else
8201 if (STRICT_ALIGNMENT)
8202 return NULL_TREE;
8203 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
8204 dest_align);
8205 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
8209 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
8212 if (ignore)
8213 return expr;
8215 if (endp == 0 || endp == 3)
8216 return omit_one_operand_loc (loc, type, dest, expr);
8218 if (expr == len)
8219 expr = NULL_TREE;
8221 if (endp == 2)
8222 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8223 ssize_int (1));
8225 len = fold_convert_loc (loc, sizetype, len);
8226 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8227 dest = fold_convert_loc (loc, type, dest);
8228 if (expr)
8229 dest = omit_one_operand_loc (loc, type, dest, expr);
8230 return dest;
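/* Worked example of the memory-op folds above: given "struct s a, b;",
   the call memcpy (&a, &b, sizeof (struct s)) can be rewritten as the
   aggregate assignment a = b (a MODIFY_EXPR between MEM_REFs) when the
   types, sizes and alignments line up; for mempcpy (ENDP == 1) the
   result additionally becomes dest + len. */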
8233 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8234 If LEN is not NULL, it represents the length of the string to be
8235 copied. Return NULL_TREE if no simplification can be made. */
8237 tree
8238 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8240 tree fn;
8242 if (!validate_arg (dest, POINTER_TYPE)
8243 || !validate_arg (src, POINTER_TYPE))
8244 return NULL_TREE;
8246 /* If SRC and DEST are the same (and not volatile), return DEST. */
8247 if (operand_equal_p (src, dest, 0))
8248 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
8250 if (optimize_function_for_size_p (cfun))
8251 return NULL_TREE;
8253 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8254 if (!fn)
8255 return NULL_TREE;
8257 if (!len)
8259 len = c_strlen (src, 1);
8260 if (! len || TREE_SIDE_EFFECTS (len))
8261 return NULL_TREE;
8264 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8265 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8266 build_call_expr_loc (loc, fn, 3, dest, src, len));
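/* Worked example of the strcpy fold above: strcpy (dst, "abc") becomes
   memcpy (dst, "abc", 4), copying the terminating NUL as well, unless
   the function is being optimized for size. */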
8269 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8270 Return NULL_TREE if no simplification can be made. */
8272 static tree
8273 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8275 tree fn, len, lenp1, call, type;
8277 if (!validate_arg (dest, POINTER_TYPE)
8278 || !validate_arg (src, POINTER_TYPE))
8279 return NULL_TREE;
8281 len = c_strlen (src, 1);
8282 if (!len
8283 || TREE_CODE (len) != INTEGER_CST)
8284 return NULL_TREE;
8286 if (optimize_function_for_size_p (cfun)
8287 /* If length is zero it's small enough. */
8288 && !integer_zerop (len))
8289 return NULL_TREE;
8291 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8292 if (!fn)
8293 return NULL_TREE;
8295 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8296 /* We use dest twice in building our expression. Save it from
8297 multiple expansions. */
8298 dest = builtin_save_expr (dest);
8299 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
8301 type = TREE_TYPE (TREE_TYPE (fndecl));
8302 len = fold_convert_loc (loc, sizetype, len);
8303 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8304 dest = fold_convert_loc (loc, type, dest);
8305 dest = omit_one_operand_loc (loc, type, dest, call);
8306 return dest;
8309 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8310 If SLEN is not NULL, it represents the length of the source string.
8311 Return NULL_TREE if no simplification can be made. */
8313 tree
8314 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8315 tree src, tree len, tree slen)
8317 tree fn;
8319 if (!validate_arg (dest, POINTER_TYPE)
8320 || !validate_arg (src, POINTER_TYPE)
8321 || !validate_arg (len, INTEGER_TYPE))
8322 return NULL_TREE;
8324 /* If the LEN parameter is zero, return DEST. */
8325 if (integer_zerop (len))
8326 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8328 /* We can't compare slen with len as constants below if len is not a
8329 constant. */
8330 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8331 return NULL_TREE;
8333 if (!slen)
8334 slen = c_strlen (src, 1);
8336 /* Now, we must be passed a constant src ptr parameter. */
8337 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8338 return NULL_TREE;
8340 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8342 /* We do not support simplification of this case, though we do
8343 support it when expanding trees into RTL. */
8344 /* FIXME: generate a call to __builtin_memset. */
8345 if (tree_int_cst_lt (slen, len))
8346 return NULL_TREE;
8348 /* OK, transform into builtin memcpy. */
8349 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8350 if (!fn)
8351 return NULL_TREE;
8352 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8353 build_call_expr_loc (loc, fn, 3, dest, src, len));
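/* Worked example of the strncpy fold above: strncpy (dst, "abcd", 3)
   becomes memcpy (dst, "abcd", 3) because the constant source is at
   least as long as the copy, while strncpy (dst, "ab", 8) is left alone
   since the required zero padding is not modelled here. */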
8356 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8357 arguments to the call, and TYPE is its return type.
8358 Return NULL_TREE if no simplification can be made. */
8360 static tree
8361 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8363 if (!validate_arg (arg1, POINTER_TYPE)
8364 || !validate_arg (arg2, INTEGER_TYPE)
8365 || !validate_arg (len, INTEGER_TYPE))
8366 return NULL_TREE;
8367 else
8369 const char *p1;
8371 if (TREE_CODE (arg2) != INTEGER_CST
8372 || !host_integerp (len, 1))
8373 return NULL_TREE;
8375 p1 = c_getstr (arg1);
8376 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8378 char c;
8379 const char *r;
8380 tree tem;
8382 if (target_char_cast (arg2, &c))
8383 return NULL_TREE;
8385 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8387 if (r == NULL)
8388 return build_int_cst (TREE_TYPE (arg1), 0);
8390 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8391 size_int (r - p1));
8392 return fold_convert_loc (loc, type, tem);
8394 return NULL_TREE;
8398 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8399 Return NULL_TREE if no simplification can be made. */
8401 static tree
8402 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8404 const char *p1, *p2;
8406 if (!validate_arg (arg1, POINTER_TYPE)
8407 || !validate_arg (arg2, POINTER_TYPE)
8408 || !validate_arg (len, INTEGER_TYPE))
8409 return NULL_TREE;
8411 /* If the LEN parameter is zero, return zero. */
8412 if (integer_zerop (len))
8413 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8414 arg1, arg2);
8416 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8417 if (operand_equal_p (arg1, arg2, 0))
8418 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8420 p1 = c_getstr (arg1);
8421 p2 = c_getstr (arg2);
8423 /* If all arguments are constant, and the value of len is not greater
8424 than the lengths of arg1 and arg2, evaluate at compile-time. */
8425 if (host_integerp (len, 1) && p1 && p2
8426 && compare_tree_int (len, strlen (p1) + 1) <= 0
8427 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8429 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8431 if (r > 0)
8432 return integer_one_node;
8433 else if (r < 0)
8434 return integer_minus_one_node;
8435 else
8436 return integer_zero_node;
8439 /* If len parameter is one, return an expression corresponding to
8440 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8441 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8443 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8444 tree cst_uchar_ptr_node
8445 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8447 tree ind1
8448 = fold_convert_loc (loc, integer_type_node,
8449 build1 (INDIRECT_REF, cst_uchar_node,
8450 fold_convert_loc (loc,
8451 cst_uchar_ptr_node,
8452 arg1)));
8453 tree ind2
8454 = fold_convert_loc (loc, integer_type_node,
8455 build1 (INDIRECT_REF, cst_uchar_node,
8456 fold_convert_loc (loc,
8457 cst_uchar_ptr_node,
8458 arg2)));
8459 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8462 return NULL_TREE;
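/* Worked examples of the memcmp folds above: memcmp ("abc", "abd", 3)
   folds to -1 at compile time, memcmp (p, q, 0) folds to 0, and
   memcmp (p, q, 1) becomes the byte difference
   *(const unsigned char*)p - *(const unsigned char*)q. */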
8465 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8466 Return NULL_TREE if no simplification can be made. */
8468 static tree
8469 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8471 const char *p1, *p2;
8473 if (!validate_arg (arg1, POINTER_TYPE)
8474 || !validate_arg (arg2, POINTER_TYPE))
8475 return NULL_TREE;
8477 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8478 if (operand_equal_p (arg1, arg2, 0))
8479 return integer_zero_node;
8481 p1 = c_getstr (arg1);
8482 p2 = c_getstr (arg2);
8484 if (p1 && p2)
8486 const int i = strcmp (p1, p2);
8487 if (i < 0)
8488 return integer_minus_one_node;
8489 else if (i > 0)
8490 return integer_one_node;
8491 else
8492 return integer_zero_node;
8495 /* If the second arg is "", return *(const unsigned char*)arg1. */
8496 if (p2 && *p2 == '\0')
8498 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8499 tree cst_uchar_ptr_node
8500 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8502 return fold_convert_loc (loc, integer_type_node,
8503 build1 (INDIRECT_REF, cst_uchar_node,
8504 fold_convert_loc (loc,
8505 cst_uchar_ptr_node,
8506 arg1)));
8509 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8510 if (p1 && *p1 == '\0')
8512 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8513 tree cst_uchar_ptr_node
8514 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8516 tree temp
8517 = fold_convert_loc (loc, integer_type_node,
8518 build1 (INDIRECT_REF, cst_uchar_node,
8519 fold_convert_loc (loc,
8520 cst_uchar_ptr_node,
8521 arg2)));
8522 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8525 return NULL_TREE;
8528 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8529 Return NULL_TREE if no simplification can be made. */
8531 static tree
8532 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8534 const char *p1, *p2;
8536 if (!validate_arg (arg1, POINTER_TYPE)
8537 || !validate_arg (arg2, POINTER_TYPE)
8538 || !validate_arg (len, INTEGER_TYPE))
8539 return NULL_TREE;
8541 /* If the LEN parameter is zero, return zero. */
8542 if (integer_zerop (len))
8543 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8544 arg1, arg2);
8546 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8547 if (operand_equal_p (arg1, arg2, 0))
8548 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8550 p1 = c_getstr (arg1);
8551 p2 = c_getstr (arg2);
8553 if (host_integerp (len, 1) && p1 && p2)
8555 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8556 if (i > 0)
8557 return integer_one_node;
8558 else if (i < 0)
8559 return integer_minus_one_node;
8560 else
8561 return integer_zero_node;
8564 /* If the second arg is "", and the length is greater than zero,
8565 return *(const unsigned char*)arg1. */
8566 if (p2 && *p2 == '\0'
8567 && TREE_CODE (len) == INTEGER_CST
8568 && tree_int_cst_sgn (len) == 1)
8570 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8571 tree cst_uchar_ptr_node
8572 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8574 return fold_convert_loc (loc, integer_type_node,
8575 build1 (INDIRECT_REF, cst_uchar_node,
8576 fold_convert_loc (loc,
8577 cst_uchar_ptr_node,
8578 arg1)));
8581 /* If the first arg is "", and the length is greater than zero,
8582 return -*(const unsigned char*)arg2. */
8583 if (p1 && *p1 == '\0'
8584 && TREE_CODE (len) == INTEGER_CST
8585 && tree_int_cst_sgn (len) == 1)
8587 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8588 tree cst_uchar_ptr_node
8589 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8591 tree temp = fold_convert_loc (loc, integer_type_node,
8592 build1 (INDIRECT_REF, cst_uchar_node,
8593 fold_convert_loc (loc,
8594 cst_uchar_ptr_node,
8595 arg2)));
8596 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8599 /* If len parameter is one, return an expression corresponding to
8600 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8601 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8603 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8604 tree cst_uchar_ptr_node
8605 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8607 tree ind1 = fold_convert_loc (loc, integer_type_node,
8608 build1 (INDIRECT_REF, cst_uchar_node,
8609 fold_convert_loc (loc,
8610 cst_uchar_ptr_node,
8611 arg1)));
8612 tree ind2 = fold_convert_loc (loc, integer_type_node,
8613 build1 (INDIRECT_REF, cst_uchar_node,
8614 fold_convert_loc (loc,
8615 cst_uchar_ptr_node,
8616 arg2)));
8617 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8620 return NULL_TREE;
8623 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8624 ARG. Return NULL_TREE if no simplification can be made. */
8626 static tree
8627 fold_builtin_signbit (location_t loc, tree arg, tree type)
8629 if (!validate_arg (arg, REAL_TYPE))
8630 return NULL_TREE;
8632 /* If ARG is a compile-time constant, determine the result. */
8633 if (TREE_CODE (arg) == REAL_CST
8634 && !TREE_OVERFLOW (arg))
8636 REAL_VALUE_TYPE c;
8638 c = TREE_REAL_CST (arg);
8639 return (REAL_VALUE_NEGATIVE (c)
8640 ? build_one_cst (type)
8641 : build_zero_cst (type));
8644 /* If ARG is non-negative, the result is always zero. */
8645 if (tree_expr_nonnegative_p (arg))
8646 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8648 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8649 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8650 return fold_build2_loc (loc, LT_EXPR, type, arg,
8651 build_real (TREE_TYPE (arg), dconst0));
8653 return NULL_TREE;
8656 /* Fold function call to builtin copysign, copysignf or copysignl with
8657 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8658 be made. */
8660 static tree
8661 fold_builtin_copysign (location_t loc, tree fndecl,
8662 tree arg1, tree arg2, tree type)
8664 tree tem;
8666 if (!validate_arg (arg1, REAL_TYPE)
8667 || !validate_arg (arg2, REAL_TYPE))
8668 return NULL_TREE;
8670 /* copysign(X,X) is X. */
8671 if (operand_equal_p (arg1, arg2, 0))
8672 return fold_convert_loc (loc, type, arg1);
8674 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8675 if (TREE_CODE (arg1) == REAL_CST
8676 && TREE_CODE (arg2) == REAL_CST
8677 && !TREE_OVERFLOW (arg1)
8678 && !TREE_OVERFLOW (arg2))
8680 REAL_VALUE_TYPE c1, c2;
8682 c1 = TREE_REAL_CST (arg1);
8683 c2 = TREE_REAL_CST (arg2);
8684 /* c1.sign := c2.sign. */
8685 real_copysign (&c1, &c2);
8686 return build_real (type, c1);
8689 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8690 Remember to evaluate Y for side-effects. */
8691 if (tree_expr_nonnegative_p (arg2))
8692 return omit_one_operand_loc (loc, type,
8693 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8694 arg2);
8696 /* Strip sign changing operations for the first argument. */
8697 tem = fold_strip_sign_ops (arg1);
8698 if (tem)
8699 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8701 return NULL_TREE;
8704 /* Fold a call to builtin isascii with argument ARG. */
8706 static tree
8707 fold_builtin_isascii (location_t loc, tree arg)
8709 if (!validate_arg (arg, INTEGER_TYPE))
8710 return NULL_TREE;
8711 else
8713 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8714 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8715 build_int_cst (integer_type_node,
8716 ~ (unsigned HOST_WIDE_INT) 0x7f));
8717 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8718 arg, integer_zero_node);
8722 /* Fold a call to builtin toascii with argument ARG. */
8724 static tree
8725 fold_builtin_toascii (location_t loc, tree arg)
8727 if (!validate_arg (arg, INTEGER_TYPE))
8728 return NULL_TREE;
8730 /* Transform toascii(c) -> (c & 0x7f). */
8731 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8732 build_int_cst (integer_type_node, 0x7f));
8735 /* Fold a call to builtin isdigit with argument ARG. */
8737 static tree
8738 fold_builtin_isdigit (location_t loc, tree arg)
8740 if (!validate_arg (arg, INTEGER_TYPE))
8741 return NULL_TREE;
8742 else
8744 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8745 /* According to the C standard, isdigit is unaffected by locale.
8746 However, it definitely is affected by the target character set. */
8747 unsigned HOST_WIDE_INT target_digit0
8748 = lang_hooks.to_target_charset ('0');
8750 if (target_digit0 == 0)
8751 return NULL_TREE;
8753 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8754 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8755 build_int_cst (unsigned_type_node, target_digit0));
8756 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8757 build_int_cst (unsigned_type_node, 9));
8761 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8763 static tree
8764 fold_builtin_fabs (location_t loc, tree arg, tree type)
8766 if (!validate_arg (arg, REAL_TYPE))
8767 return NULL_TREE;
8769 arg = fold_convert_loc (loc, type, arg);
8770 if (TREE_CODE (arg) == REAL_CST)
8771 return fold_abs_const (arg, type);
8772 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8775 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8777 static tree
8778 fold_builtin_abs (location_t loc, tree arg, tree type)
8780 if (!validate_arg (arg, INTEGER_TYPE))
8781 return NULL_TREE;
8783 arg = fold_convert_loc (loc, type, arg);
8784 if (TREE_CODE (arg) == INTEGER_CST)
8785 return fold_abs_const (arg, type);
8786 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8789 /* Fold a fma operation with arguments ARG[012]. */
8791 tree
8792 fold_fma (location_t loc ATTRIBUTE_UNUSED,
8793 tree type, tree arg0, tree arg1, tree arg2)
8795 if (TREE_CODE (arg0) == REAL_CST
8796 && TREE_CODE (arg1) == REAL_CST
8797 && TREE_CODE (arg2) == REAL_CST)
8798 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8800 return NULL_TREE;
8803 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8805 static tree
8806 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8808 if (validate_arg (arg0, REAL_TYPE)
8809 && validate_arg(arg1, REAL_TYPE)
8810 && validate_arg(arg2, REAL_TYPE))
8812 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8813 if (tem)
8814 return tem;
8816 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8817 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8818 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8820 return NULL_TREE;
8823 /* Fold a call to builtin fmin or fmax. */
8825 static tree
8826 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8827 tree type, bool max)
8829 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8831 /* Calculate the result when the argument is a constant. */
8832 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8834 if (res)
8835 return res;
8837 /* If either argument is NaN, return the other one. Avoid the
8838 transformation if we get (and honor) a signalling NaN. Using
8839 omit_one_operand() ensures we create a non-lvalue. */
8840 if (TREE_CODE (arg0) == REAL_CST
8841 && real_isnan (&TREE_REAL_CST (arg0))
8842 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8843 || ! TREE_REAL_CST (arg0).signalling))
8844 return omit_one_operand_loc (loc, type, arg1, arg0);
8845 if (TREE_CODE (arg1) == REAL_CST
8846 && real_isnan (&TREE_REAL_CST (arg1))
8847 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
8848 || ! TREE_REAL_CST (arg1).signalling))
8849 return omit_one_operand_loc (loc, type, arg0, arg1);
8851 /* Transform fmin/fmax(x,x) -> x. */
8852 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8853 return omit_one_operand_loc (loc, type, arg0, arg1);
8855 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8856 functions to return the numeric arg if the other one is NaN.
8857 These tree codes don't honor that, so only transform if
8858 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8859 handled, so we don't have to worry about it either. */
8860 if (flag_finite_math_only)
8861 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8862 fold_convert_loc (loc, type, arg0),
8863 fold_convert_loc (loc, type, arg1));
8865 return NULL_TREE;
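/* Worked examples of the fmin/fmax folds above: fmax (x, __builtin_nan (""))
   becomes x (the quiet NaN argument is dropped), fmin (x, x) becomes x,
   and with -ffinite-math-only fmax (a, b) is lowered to MAX_EXPR <a, b>. */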
8868 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8870 static tree
8871 fold_builtin_carg (location_t loc, tree arg, tree type)
8873 if (validate_arg (arg, COMPLEX_TYPE)
8874 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8876 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8878 if (atan2_fn)
8880 tree new_arg = builtin_save_expr (arg);
8881 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8882 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8883 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8887 return NULL_TREE;
8890 /* Fold a call to builtin logb/ilogb. */
8892 static tree
8893 fold_builtin_logb (location_t loc, tree arg, tree rettype)
8895 if (! validate_arg (arg, REAL_TYPE))
8896 return NULL_TREE;
8898 STRIP_NOPS (arg);
8900 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8902 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8904 switch (value->cl)
8906 case rvc_nan:
8907 case rvc_inf:
8908 /* If arg is Inf or NaN and we're logb, return it. */
8909 if (TREE_CODE (rettype) == REAL_TYPE)
8910 return fold_convert_loc (loc, rettype, arg);
8911 /* Fall through... */
8912 case rvc_zero:
8913 /* Zero may set errno and/or raise an exception for logb, also
8914 for ilogb we don't know FP_ILOGB0. */
8915 return NULL_TREE;
8916 case rvc_normal:
8917 /* For normal numbers, proceed iff radix == 2. In GCC,
8918 normalized significands are in the range [0.5, 1.0). We
8919 want the exponent as if they were [1.0, 2.0) so get the
8920 exponent and subtract 1. */
8921 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8922 return fold_convert_loc (loc, rettype,
8923 build_int_cst (integer_type_node,
8924 REAL_EXP (value)-1));
8925 break;
8929 return NULL_TREE;
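/* Worked examples of the logb/ilogb folds above: logb (8.0) folds to 3.0
   and ilogb (0.03125) to -5; zero arguments are left alone because they
   may set errno or raise an exception. */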
8932 /* Fold a call to builtin significand, if radix == 2. */
8934 static tree
8935 fold_builtin_significand (location_t loc, tree arg, tree rettype)
8937 if (! validate_arg (arg, REAL_TYPE))
8938 return NULL_TREE;
8940 STRIP_NOPS (arg);
8942 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8944 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
8946 switch (value->cl)
8948 case rvc_zero:
8949 case rvc_nan:
8950 case rvc_inf:
8951 /* If arg is +-0, +-Inf or +-NaN, then return it. */
8952 return fold_convert_loc (loc, rettype, arg);
8953 case rvc_normal:
8954 /* For normal numbers, proceed iff radix == 2. */
8955 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8957 REAL_VALUE_TYPE result = *value;
8958 /* In GCC, normalized significands are in the range [0.5,
8959 1.0). We want them to be [1.0, 2.0) so set the
8960 exponent to 1. */
8961 SET_REAL_EXP (&result, 1);
8962 return build_real (rettype, result);
8964 break;
8968 return NULL_TREE;
8971 /* Fold a call to builtin frexp, we can assume the base is 2. */
8973 static tree
8974 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8976 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8977 return NULL_TREE;
8979 STRIP_NOPS (arg0);
8981 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8982 return NULL_TREE;
8984 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8986 /* Proceed if a valid pointer type was passed in. */
8987 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8989 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8990 tree frac, exp;
8992 switch (value->cl)
8994 case rvc_zero:
8995 /* For +-0, return (*exp = 0, +-0). */
8996 exp = integer_zero_node;
8997 frac = arg0;
8998 break;
8999 case rvc_nan:
9000 case rvc_inf:
9001 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9002 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9003 case rvc_normal:
9005 /* Since the frexp function always expects base 2, and in
9006 GCC normalized significands are already in the range
9007 [0.5, 1.0), we have exactly what frexp wants. */
9008 REAL_VALUE_TYPE frac_rvt = *value;
9009 SET_REAL_EXP (&frac_rvt, 0);
9010 frac = build_real (rettype, frac_rvt);
9011 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9013 break;
9014 default:
9015 gcc_unreachable ();
9018 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9019 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9020 TREE_SIDE_EFFECTS (arg1) = 1;
9021 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9024 return NULL_TREE;
9027 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9028 then we can assume the base is two. If it's false, then we have to
9029 check the mode of the TYPE parameter in certain cases. */
9031 static tree
9032 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9033 tree type, bool ldexp)
9035 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9037 STRIP_NOPS (arg0);
9038 STRIP_NOPS (arg1);
9040 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9041 if (real_zerop (arg0) || integer_zerop (arg1)
9042 || (TREE_CODE (arg0) == REAL_CST
9043 && !real_isfinite (&TREE_REAL_CST (arg0))))
9044 return omit_one_operand_loc (loc, type, arg0, arg1);
9046 /* If both arguments are constant, then try to evaluate it. */
9047 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9048 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9049 && host_integerp (arg1, 0))
9051 /* Bound the maximum adjustment to twice the range of the
9052 mode's valid exponents. Use abs to ensure the range is
9053 positive as a sanity check. */
9054 const long max_exp_adj = 2 *
9055 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9056 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9058 /* Get the user-requested adjustment. */
9059 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9061 /* The requested adjustment must be inside this range. This
9062 is a preliminary cap to avoid things like overflow; we
9063 may still fail to compute the result for other reasons. */
9064 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9066 REAL_VALUE_TYPE initial_result;
9068 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9070 /* Ensure we didn't overflow. */
9071 if (! real_isinf (&initial_result))
9073 const REAL_VALUE_TYPE trunc_result
9074 = real_value_truncate (TYPE_MODE (type), initial_result);
9076 /* Only proceed if the target mode can hold the
9077 resulting value. */
9078 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9079 return build_real (type, trunc_result);
9085 return NULL_TREE;
9088 /* Fold a call to builtin modf. */
9090 static tree
9091 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9093 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9094 return NULL_TREE;
9096 STRIP_NOPS (arg0);
9098 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9099 return NULL_TREE;
9101 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9103 /* Proceed if a valid pointer type was passed in. */
9104 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9106 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9107 REAL_VALUE_TYPE trunc, frac;
9109 switch (value->cl)
9111 case rvc_nan:
9112 case rvc_zero:
9113 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9114 trunc = frac = *value;
9115 break;
9116 case rvc_inf:
9117 /* For +-Inf, return (*arg1 = arg0, +-0). */
9118 frac = dconst0;
9119 frac.sign = value->sign;
9120 trunc = *value;
9121 break;
9122 case rvc_normal:
9123 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9124 real_trunc (&trunc, VOIDmode, value);
9125 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9126 /* If the original number was negative and already
9127 integral, then the fractional part is -0.0. */
9128 if (value->sign && frac.cl == rvc_zero)
9129 frac.sign = value->sign;
9130 break;
9133 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9134 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9135 build_real (rettype, trunc));
9136 TREE_SIDE_EFFECTS (arg1) = 1;
9137 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9138 build_real (rettype, frac));
9141 return NULL_TREE;
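/* Editor's note, not part of the original builtins.c: a sketch of the modf
   folding above for a constant argument.  A call such as

     double ip;
     double fr = __builtin_modf (-3.25, &ip);

   is folded into the equivalent of

     double fr = (ip = -3.0, -0.25);

   matching the (*arg1 = trunc(arg0), arg0 - trunc(arg0)) scheme in the
   rvc_normal case.  */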
9144 /* Given a location LOC, an interclass builtin function decl FNDECL
9145 and its single argument ARG, return a folded expression computing
9146 the same, or NULL_TREE if we either couldn't or didn't want to fold
9147 (the latter happens if there's an RTL instruction available). */
9149 static tree
9150 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9152 enum machine_mode mode;
9154 if (!validate_arg (arg, REAL_TYPE))
9155 return NULL_TREE;
9157 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9158 return NULL_TREE;
9160 mode = TYPE_MODE (TREE_TYPE (arg));
9162 /* If there is no optab, try generic code. */
9163 switch (DECL_FUNCTION_CODE (fndecl))
9165 tree result;
9167 CASE_FLT_FN (BUILT_IN_ISINF):
9169 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9170 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9171 tree const type = TREE_TYPE (arg);
9172 REAL_VALUE_TYPE r;
9173 char buf[128];
9175 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9176 real_from_string (&r, buf);
9177 result = build_call_expr (isgr_fn, 2,
9178 fold_build1_loc (loc, ABS_EXPR, type, arg),
9179 build_real (type, r));
9180 return result;
9182 CASE_FLT_FN (BUILT_IN_FINITE):
9183 case BUILT_IN_ISFINITE:
9185 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9186 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9187 tree const type = TREE_TYPE (arg);
9188 REAL_VALUE_TYPE r;
9189 char buf[128];
9191 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9192 real_from_string (&r, buf);
9193 result = build_call_expr (isle_fn, 2,
9194 fold_build1_loc (loc, ABS_EXPR, type, arg),
9195 build_real (type, r));
9196 /*result = fold_build2_loc (loc, UNGT_EXPR,
9197 TREE_TYPE (TREE_TYPE (fndecl)),
9198 fold_build1_loc (loc, ABS_EXPR, type, arg),
9199 build_real (type, r));
9200 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9201 TREE_TYPE (TREE_TYPE (fndecl)),
9202 result);*/
9203 return result;
9205 case BUILT_IN_ISNORMAL:
9207 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9208 islessequal(fabs(x),DBL_MAX). */
9209 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9210 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9211 tree const type = TREE_TYPE (arg);
9212 REAL_VALUE_TYPE rmax, rmin;
9213 char buf[128];
9215 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9216 real_from_string (&rmax, buf);
9217 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9218 real_from_string (&rmin, buf);
9219 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9220 result = build_call_expr (isle_fn, 2, arg,
9221 build_real (type, rmax));
9222 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9223 build_call_expr (isge_fn, 2, arg,
9224 build_real (type, rmin)));
9225 return result;
9227 default:
9228 break;
9231 return NULL_TREE;
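/* Editor's sketch, not part of the original builtins.c: a plain-C model of
   the generic expansions built above, using builtins and predefined macros
   that are known to exist; __DBL_MAX__ and __DBL_MIN__ stand in for the
   get_max_float/"0x1p..." strings.  The real code constructs the
   corresponding trees directly.  */
static int
isfinite_model (double x)
{
  /* isfinite(x) -> islessequal(fabs(x), DBL_MAX).  */
  return __builtin_islessequal (__builtin_fabs (x), __DBL_MAX__);
}

static int
isnormal_model (double x)
{
  /* isnormal(x) -> isgreaterequal(fabs(x), DBL_MIN)
		    & islessequal(fabs(x), DBL_MAX).  */
  return __builtin_isgreaterequal (__builtin_fabs (x), __DBL_MIN__)
	 & __builtin_islessequal (__builtin_fabs (x), __DBL_MAX__);
}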
9234 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
9235 ARG is the argument for the call. */
9237 static tree
9238 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9240 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9241 REAL_VALUE_TYPE r;
9243 if (!validate_arg (arg, REAL_TYPE))
9244 return NULL_TREE;
9246 switch (builtin_index)
9248 case BUILT_IN_ISINF:
9249 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9250 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9252 if (TREE_CODE (arg) == REAL_CST)
9254 r = TREE_REAL_CST (arg);
9255 if (real_isinf (&r))
9256 return real_compare (GT_EXPR, &r, &dconst0)
9257 ? integer_one_node : integer_minus_one_node;
9258 else
9259 return integer_zero_node;
9262 return NULL_TREE;
9264 case BUILT_IN_ISINF_SIGN:
9266 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9267 /* In a boolean context, GCC will fold the inner COND_EXPR to
9268 1. So e.g. "if (isinf_sign(x))" would be folded to just
9269 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9270 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9271 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9272 tree tmp = NULL_TREE;
9274 arg = builtin_save_expr (arg);
9276 if (signbit_fn && isinf_fn)
9278 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9279 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9281 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9282 signbit_call, integer_zero_node);
9283 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9284 isinf_call, integer_zero_node);
9286 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9287 integer_minus_one_node, integer_one_node);
9288 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9289 isinf_call, tmp,
9290 integer_zero_node);
9293 return tmp;
9296 case BUILT_IN_ISFINITE:
9297 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9298 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9299 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9301 if (TREE_CODE (arg) == REAL_CST)
9303 r = TREE_REAL_CST (arg);
9304 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9307 return NULL_TREE;
9309 case BUILT_IN_ISNAN:
9310 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9311 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9313 if (TREE_CODE (arg) == REAL_CST)
9315 r = TREE_REAL_CST (arg);
9316 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9319 arg = builtin_save_expr (arg);
9320 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9322 default:
9323 gcc_unreachable ();
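/* Editor's sketch, not part of the original builtins.c: a plain-C model of
   the isinf_sign expansion built above; the folder emits the same logic as
   nested COND_EXPRs over the isinf and signbit builtins.  */
static int
isinf_sign_model (double x)
{
  /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0.  */
  return __builtin_isinf (x) ? (__builtin_signbit (x) ? -1 : 1) : 0;
}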
9327 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9328 This builtin will generate code to return the appropriate floating
9329 point classification depending on the value of the floating point
9330 number passed in. The possible return values must be supplied as
9331 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9332 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9333 one floating point argument which is "type generic". */
9335 static tree
9336 fold_builtin_fpclassify (location_t loc, tree exp)
9338 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9339 arg, type, res, tmp;
9340 enum machine_mode mode;
9341 REAL_VALUE_TYPE r;
9342 char buf[128];
9344 /* Verify the required arguments in the original call. */
9345 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9346 INTEGER_TYPE, INTEGER_TYPE,
9347 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9348 return NULL_TREE;
9350 fp_nan = CALL_EXPR_ARG (exp, 0);
9351 fp_infinite = CALL_EXPR_ARG (exp, 1);
9352 fp_normal = CALL_EXPR_ARG (exp, 2);
9353 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9354 fp_zero = CALL_EXPR_ARG (exp, 4);
9355 arg = CALL_EXPR_ARG (exp, 5);
9356 type = TREE_TYPE (arg);
9357 mode = TYPE_MODE (type);
9358 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9360 /* fpclassify(x) ->
9361 isnan(x) ? FP_NAN :
9362 (fabs(x) == Inf ? FP_INFINITE :
9363 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9364 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9366 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9367 build_real (type, dconst0));
9368 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9369 tmp, fp_zero, fp_subnormal);
9371 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9372 real_from_string (&r, buf);
9373 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9374 arg, build_real (type, r));
9375 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9377 if (HONOR_INFINITIES (mode))
9379 real_inf (&r);
9380 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9381 build_real (type, r));
9382 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9383 fp_infinite, res);
9386 if (HONOR_NANS (mode))
9388 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9389 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9392 return res;
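/* Editor's note, not part of the original builtins.c: the type-generic
   fpclassify macro in a typical <math.h> expands to something like

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, x)

   and the folding above rewrites that into the nested COND_EXPR chain
   from the comment; for a constant argument such as 0.0 the remaining
   folds can collapse the whole call to FP_ZERO.  */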
9395 /* Fold a call to an unordered comparison function such as
9396 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9397 being called and ARG0 and ARG1 are the arguments for the call.
9398 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9399 the opposite of the desired result. UNORDERED_CODE is used
9400 for modes that can hold NaNs and ORDERED_CODE is used for
9401 the rest. */
9403 static tree
9404 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9405 enum tree_code unordered_code,
9406 enum tree_code ordered_code)
9408 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9409 enum tree_code code;
9410 tree type0, type1;
9411 enum tree_code code0, code1;
9412 tree cmp_type = NULL_TREE;
9414 type0 = TREE_TYPE (arg0);
9415 type1 = TREE_TYPE (arg1);
9417 code0 = TREE_CODE (type0);
9418 code1 = TREE_CODE (type1);
9420 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9421 /* Choose the wider of two real types. */
9422 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9423 ? type0 : type1;
9424 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9425 cmp_type = type0;
9426 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9427 cmp_type = type1;
9429 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9430 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9432 if (unordered_code == UNORDERED_EXPR)
9434 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9435 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9436 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9439 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9440 : ordered_code;
9441 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9442 fold_build2_loc (loc, code, type, arg0, arg1));
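/* Editor's sketch, not part of the original builtins.c: a plain-C model of
   the inversion used above.  For a type that honors NaNs, isgreater(x, y)
   is rewritten as the negation of an UNLE_EXPR ("unordered or less than or
   equal"), which this function spells out with existing builtins.  */
static int
isgreater_model (double x, double y)
{
  /* !(x UNLE y): false when the operands are unordered or x <= y, true
     exactly when x > y, without raising an invalid-operand exception.  */
  return !(__builtin_isunordered (x, y) || __builtin_islessequal (x, y));
}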
9445 /* Fold a call to built-in function FNDECL with 0 arguments.
9446 IGNORE is true if the result of the function call is ignored. This
9447 function returns NULL_TREE if no simplification was possible. */
9449 static tree
9450 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9452 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9453 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9454 switch (fcode)
9456 CASE_FLT_FN (BUILT_IN_INF):
9457 case BUILT_IN_INFD32:
9458 case BUILT_IN_INFD64:
9459 case BUILT_IN_INFD128:
9460 return fold_builtin_inf (loc, type, true);
9462 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9463 return fold_builtin_inf (loc, type, false);
9465 case BUILT_IN_CLASSIFY_TYPE:
9466 return fold_builtin_classify_type (NULL_TREE);
9468 default:
9469 break;
9471 return NULL_TREE;
9474 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9475 IGNORE is true if the result of the function call is ignored. This
9476 function returns NULL_TREE if no simplification was possible. */
9478 static tree
9479 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9481 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9483 switch (fcode)
9485 case BUILT_IN_CONSTANT_P:
9487 tree val = fold_builtin_constant_p (arg0);
9489 /* Gimplification will pull the CALL_EXPR for the builtin out of
9490 an if condition. When not optimizing, we'll not CSE it back.
9491 To avoid regressions such as link errors, return false now. */
9492 if (!val && !optimize)
9493 val = integer_zero_node;
9495 return val;
9498 case BUILT_IN_CLASSIFY_TYPE:
9499 return fold_builtin_classify_type (arg0);
9501 case BUILT_IN_STRLEN:
9502 return fold_builtin_strlen (loc, type, arg0);
9504 CASE_FLT_FN (BUILT_IN_FABS):
9505 return fold_builtin_fabs (loc, arg0, type);
9507 case BUILT_IN_ABS:
9508 case BUILT_IN_LABS:
9509 case BUILT_IN_LLABS:
9510 case BUILT_IN_IMAXABS:
9511 return fold_builtin_abs (loc, arg0, type);
9513 CASE_FLT_FN (BUILT_IN_CONJ):
9514 if (validate_arg (arg0, COMPLEX_TYPE)
9515 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9516 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9517 break;
9519 CASE_FLT_FN (BUILT_IN_CREAL):
9520 if (validate_arg (arg0, COMPLEX_TYPE)
9521 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9522 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9523 break;
9525 CASE_FLT_FN (BUILT_IN_CIMAG):
9526 if (validate_arg (arg0, COMPLEX_TYPE)
9527 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9528 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9529 break;
9531 CASE_FLT_FN (BUILT_IN_CCOS):
9532 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
9534 CASE_FLT_FN (BUILT_IN_CCOSH):
9535 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
9537 CASE_FLT_FN (BUILT_IN_CPROJ):
9538 return fold_builtin_cproj (loc, arg0, type);
9540 CASE_FLT_FN (BUILT_IN_CSIN):
9541 if (validate_arg (arg0, COMPLEX_TYPE)
9542 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9543 return do_mpc_arg1 (arg0, type, mpc_sin);
9544 break;
9546 CASE_FLT_FN (BUILT_IN_CSINH):
9547 if (validate_arg (arg0, COMPLEX_TYPE)
9548 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9549 return do_mpc_arg1 (arg0, type, mpc_sinh);
9550 break;
9552 CASE_FLT_FN (BUILT_IN_CTAN):
9553 if (validate_arg (arg0, COMPLEX_TYPE)
9554 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9555 return do_mpc_arg1 (arg0, type, mpc_tan);
9556 break;
9558 CASE_FLT_FN (BUILT_IN_CTANH):
9559 if (validate_arg (arg0, COMPLEX_TYPE)
9560 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9561 return do_mpc_arg1 (arg0, type, mpc_tanh);
9562 break;
9564 CASE_FLT_FN (BUILT_IN_CLOG):
9565 if (validate_arg (arg0, COMPLEX_TYPE)
9566 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9567 return do_mpc_arg1 (arg0, type, mpc_log);
9568 break;
9570 CASE_FLT_FN (BUILT_IN_CSQRT):
9571 if (validate_arg (arg0, COMPLEX_TYPE)
9572 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9573 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9574 break;
9576 CASE_FLT_FN (BUILT_IN_CASIN):
9577 if (validate_arg (arg0, COMPLEX_TYPE)
9578 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9579 return do_mpc_arg1 (arg0, type, mpc_asin);
9580 break;
9582 CASE_FLT_FN (BUILT_IN_CACOS):
9583 if (validate_arg (arg0, COMPLEX_TYPE)
9584 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9585 return do_mpc_arg1 (arg0, type, mpc_acos);
9586 break;
9588 CASE_FLT_FN (BUILT_IN_CATAN):
9589 if (validate_arg (arg0, COMPLEX_TYPE)
9590 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9591 return do_mpc_arg1 (arg0, type, mpc_atan);
9592 break;
9594 CASE_FLT_FN (BUILT_IN_CASINH):
9595 if (validate_arg (arg0, COMPLEX_TYPE)
9596 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9597 return do_mpc_arg1 (arg0, type, mpc_asinh);
9598 break;
9600 CASE_FLT_FN (BUILT_IN_CACOSH):
9601 if (validate_arg (arg0, COMPLEX_TYPE)
9602 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9603 return do_mpc_arg1 (arg0, type, mpc_acosh);
9604 break;
9606 CASE_FLT_FN (BUILT_IN_CATANH):
9607 if (validate_arg (arg0, COMPLEX_TYPE)
9608 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9609 return do_mpc_arg1 (arg0, type, mpc_atanh);
9610 break;
9612 CASE_FLT_FN (BUILT_IN_CABS):
9613 return fold_builtin_cabs (loc, arg0, type, fndecl);
9615 CASE_FLT_FN (BUILT_IN_CARG):
9616 return fold_builtin_carg (loc, arg0, type);
9618 CASE_FLT_FN (BUILT_IN_SQRT):
9619 return fold_builtin_sqrt (loc, arg0, type);
9621 CASE_FLT_FN (BUILT_IN_CBRT):
9622 return fold_builtin_cbrt (loc, arg0, type);
9624 CASE_FLT_FN (BUILT_IN_ASIN):
9625 if (validate_arg (arg0, REAL_TYPE))
9626 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9627 &dconstm1, &dconst1, true);
9628 break;
9630 CASE_FLT_FN (BUILT_IN_ACOS):
9631 if (validate_arg (arg0, REAL_TYPE))
9632 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9633 &dconstm1, &dconst1, true);
9634 break;
9636 CASE_FLT_FN (BUILT_IN_ATAN):
9637 if (validate_arg (arg0, REAL_TYPE))
9638 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9639 break;
9641 CASE_FLT_FN (BUILT_IN_ASINH):
9642 if (validate_arg (arg0, REAL_TYPE))
9643 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9644 break;
9646 CASE_FLT_FN (BUILT_IN_ACOSH):
9647 if (validate_arg (arg0, REAL_TYPE))
9648 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9649 &dconst1, NULL, true);
9650 break;
9652 CASE_FLT_FN (BUILT_IN_ATANH):
9653 if (validate_arg (arg0, REAL_TYPE))
9654 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9655 &dconstm1, &dconst1, false);
9656 break;
9658 CASE_FLT_FN (BUILT_IN_SIN):
9659 if (validate_arg (arg0, REAL_TYPE))
9660 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9661 break;
9663 CASE_FLT_FN (BUILT_IN_COS):
9664 return fold_builtin_cos (loc, arg0, type, fndecl);
9666 CASE_FLT_FN (BUILT_IN_TAN):
9667 return fold_builtin_tan (arg0, type);
9669 CASE_FLT_FN (BUILT_IN_CEXP):
9670 return fold_builtin_cexp (loc, arg0, type);
9672 CASE_FLT_FN (BUILT_IN_CEXPI):
9673 if (validate_arg (arg0, REAL_TYPE))
9674 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9675 break;
9677 CASE_FLT_FN (BUILT_IN_SINH):
9678 if (validate_arg (arg0, REAL_TYPE))
9679 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9680 break;
9682 CASE_FLT_FN (BUILT_IN_COSH):
9683 return fold_builtin_cosh (loc, arg0, type, fndecl);
9685 CASE_FLT_FN (BUILT_IN_TANH):
9686 if (validate_arg (arg0, REAL_TYPE))
9687 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9688 break;
9690 CASE_FLT_FN (BUILT_IN_ERF):
9691 if (validate_arg (arg0, REAL_TYPE))
9692 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9693 break;
9695 CASE_FLT_FN (BUILT_IN_ERFC):
9696 if (validate_arg (arg0, REAL_TYPE))
9697 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9698 break;
9700 CASE_FLT_FN (BUILT_IN_TGAMMA):
9701 if (validate_arg (arg0, REAL_TYPE))
9702 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9703 break;
9705 CASE_FLT_FN (BUILT_IN_EXP):
9706 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9708 CASE_FLT_FN (BUILT_IN_EXP2):
9709 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9711 CASE_FLT_FN (BUILT_IN_EXP10):
9712 CASE_FLT_FN (BUILT_IN_POW10):
9713 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9715 CASE_FLT_FN (BUILT_IN_EXPM1):
9716 if (validate_arg (arg0, REAL_TYPE))
9717 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9718 break;
9720 CASE_FLT_FN (BUILT_IN_LOG):
9721 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9723 CASE_FLT_FN (BUILT_IN_LOG2):
9724 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9726 CASE_FLT_FN (BUILT_IN_LOG10):
9727 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9729 CASE_FLT_FN (BUILT_IN_LOG1P):
9730 if (validate_arg (arg0, REAL_TYPE))
9731 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9732 &dconstm1, NULL, false);
9733 break;
9735 CASE_FLT_FN (BUILT_IN_J0):
9736 if (validate_arg (arg0, REAL_TYPE))
9737 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9738 NULL, NULL, 0);
9739 break;
9741 CASE_FLT_FN (BUILT_IN_J1):
9742 if (validate_arg (arg0, REAL_TYPE))
9743 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9744 NULL, NULL, 0);
9745 break;
9747 CASE_FLT_FN (BUILT_IN_Y0):
9748 if (validate_arg (arg0, REAL_TYPE))
9749 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9750 &dconst0, NULL, false);
9751 break;
9753 CASE_FLT_FN (BUILT_IN_Y1):
9754 if (validate_arg (arg0, REAL_TYPE))
9755 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9756 &dconst0, NULL, false);
9757 break;
9759 CASE_FLT_FN (BUILT_IN_NAN):
9760 case BUILT_IN_NAND32:
9761 case BUILT_IN_NAND64:
9762 case BUILT_IN_NAND128:
9763 return fold_builtin_nan (arg0, type, true);
9765 CASE_FLT_FN (BUILT_IN_NANS):
9766 return fold_builtin_nan (arg0, type, false);
9768 CASE_FLT_FN (BUILT_IN_FLOOR):
9769 return fold_builtin_floor (loc, fndecl, arg0);
9771 CASE_FLT_FN (BUILT_IN_CEIL):
9772 return fold_builtin_ceil (loc, fndecl, arg0);
9774 CASE_FLT_FN (BUILT_IN_TRUNC):
9775 return fold_builtin_trunc (loc, fndecl, arg0);
9777 CASE_FLT_FN (BUILT_IN_ROUND):
9778 return fold_builtin_round (loc, fndecl, arg0);
9780 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9781 CASE_FLT_FN (BUILT_IN_RINT):
9782 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9784 CASE_FLT_FN (BUILT_IN_LCEIL):
9785 CASE_FLT_FN (BUILT_IN_LLCEIL):
9786 CASE_FLT_FN (BUILT_IN_LFLOOR):
9787 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9788 CASE_FLT_FN (BUILT_IN_LROUND):
9789 CASE_FLT_FN (BUILT_IN_LLROUND):
9790 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9792 CASE_FLT_FN (BUILT_IN_LRINT):
9793 CASE_FLT_FN (BUILT_IN_LLRINT):
9794 return fold_fixed_mathfn (loc, fndecl, arg0);
9796 case BUILT_IN_BSWAP32:
9797 case BUILT_IN_BSWAP64:
9798 return fold_builtin_bswap (fndecl, arg0);
9800 CASE_INT_FN (BUILT_IN_FFS):
9801 CASE_INT_FN (BUILT_IN_CLZ):
9802 CASE_INT_FN (BUILT_IN_CTZ):
9803 CASE_INT_FN (BUILT_IN_CLRSB):
9804 CASE_INT_FN (BUILT_IN_POPCOUNT):
9805 CASE_INT_FN (BUILT_IN_PARITY):
9806 return fold_builtin_bitop (fndecl, arg0);
9808 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9809 return fold_builtin_signbit (loc, arg0, type);
9811 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9812 return fold_builtin_significand (loc, arg0, type);
9814 CASE_FLT_FN (BUILT_IN_ILOGB):
9815 CASE_FLT_FN (BUILT_IN_LOGB):
9816 return fold_builtin_logb (loc, arg0, type);
9818 case BUILT_IN_ISASCII:
9819 return fold_builtin_isascii (loc, arg0);
9821 case BUILT_IN_TOASCII:
9822 return fold_builtin_toascii (loc, arg0);
9824 case BUILT_IN_ISDIGIT:
9825 return fold_builtin_isdigit (loc, arg0);
9827 CASE_FLT_FN (BUILT_IN_FINITE):
9828 case BUILT_IN_FINITED32:
9829 case BUILT_IN_FINITED64:
9830 case BUILT_IN_FINITED128:
9831 case BUILT_IN_ISFINITE:
9833 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9834 if (ret)
9835 return ret;
9836 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9839 CASE_FLT_FN (BUILT_IN_ISINF):
9840 case BUILT_IN_ISINFD32:
9841 case BUILT_IN_ISINFD64:
9842 case BUILT_IN_ISINFD128:
9844 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9845 if (ret)
9846 return ret;
9847 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9850 case BUILT_IN_ISNORMAL:
9851 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9853 case BUILT_IN_ISINF_SIGN:
9854 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9856 CASE_FLT_FN (BUILT_IN_ISNAN):
9857 case BUILT_IN_ISNAND32:
9858 case BUILT_IN_ISNAND64:
9859 case BUILT_IN_ISNAND128:
9860 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9862 case BUILT_IN_PRINTF:
9863 case BUILT_IN_PRINTF_UNLOCKED:
9864 case BUILT_IN_VPRINTF:
9865 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
9867 case BUILT_IN_FREE:
9868 if (integer_zerop (arg0))
9869 return build_empty_stmt (loc);
9870 break;
9872 default:
9873 break;
9876 return NULL_TREE;
9880 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9881 IGNORE is true if the result of the function call is ignored. This
9882 function returns NULL_TREE if no simplification was possible. */
9884 static tree
9885 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
9887 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9888 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9890 switch (fcode)
9892 CASE_FLT_FN (BUILT_IN_JN):
9893 if (validate_arg (arg0, INTEGER_TYPE)
9894 && validate_arg (arg1, REAL_TYPE))
9895 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9896 break;
9898 CASE_FLT_FN (BUILT_IN_YN):
9899 if (validate_arg (arg0, INTEGER_TYPE)
9900 && validate_arg (arg1, REAL_TYPE))
9901 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9902 &dconst0, false);
9903 break;
9905 CASE_FLT_FN (BUILT_IN_DREM):
9906 CASE_FLT_FN (BUILT_IN_REMAINDER):
9907 if (validate_arg (arg0, REAL_TYPE)
9908 && validate_arg (arg1, REAL_TYPE))
9909 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9910 break;
9912 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9913 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9914 if (validate_arg (arg0, REAL_TYPE)
9915 && validate_arg (arg1, POINTER_TYPE))
9916 return do_mpfr_lgamma_r (arg0, arg1, type);
9917 break;
9919 CASE_FLT_FN (BUILT_IN_ATAN2):
9920 if (validate_arg (arg0, REAL_TYPE)
9921 && validate_arg (arg1, REAL_TYPE))
9922 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9923 break;
9925 CASE_FLT_FN (BUILT_IN_FDIM):
9926 if (validate_arg (arg0, REAL_TYPE)
9927 && validate_arg (arg1, REAL_TYPE))
9928 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9929 break;
9931 CASE_FLT_FN (BUILT_IN_HYPOT):
9932 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
9934 CASE_FLT_FN (BUILT_IN_CPOW):
9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9937 && validate_arg (arg1, COMPLEX_TYPE)
9938 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
9939 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
9940 break;
9942 CASE_FLT_FN (BUILT_IN_LDEXP):
9943 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
9944 CASE_FLT_FN (BUILT_IN_SCALBN):
9945 CASE_FLT_FN (BUILT_IN_SCALBLN):
9946 return fold_builtin_load_exponent (loc, arg0, arg1,
9947 type, /*ldexp=*/false);
9949 CASE_FLT_FN (BUILT_IN_FREXP):
9950 return fold_builtin_frexp (loc, arg0, arg1, type);
9952 CASE_FLT_FN (BUILT_IN_MODF):
9953 return fold_builtin_modf (loc, arg0, arg1, type);
9955 case BUILT_IN_BZERO:
9956 return fold_builtin_bzero (loc, arg0, arg1, ignore);
9958 case BUILT_IN_FPUTS:
9959 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
9961 case BUILT_IN_FPUTS_UNLOCKED:
9962 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
9964 case BUILT_IN_STRSTR:
9965 return fold_builtin_strstr (loc, arg0, arg1, type);
9967 case BUILT_IN_STRCAT:
9968 return fold_builtin_strcat (loc, arg0, arg1);
9970 case BUILT_IN_STRSPN:
9971 return fold_builtin_strspn (loc, arg0, arg1);
9973 case BUILT_IN_STRCSPN:
9974 return fold_builtin_strcspn (loc, arg0, arg1);
9976 case BUILT_IN_STRCHR:
9977 case BUILT_IN_INDEX:
9978 return fold_builtin_strchr (loc, arg0, arg1, type);
9980 case BUILT_IN_STRRCHR:
9981 case BUILT_IN_RINDEX:
9982 return fold_builtin_strrchr (loc, arg0, arg1, type);
9984 case BUILT_IN_STRCPY:
9985 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
9987 case BUILT_IN_STPCPY:
9988 if (ignore)
9990 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
9991 if (!fn)
9992 break;
9994 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
9996 else
9997 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
9998 break;
10000 case BUILT_IN_STRCMP:
10001 return fold_builtin_strcmp (loc, arg0, arg1);
10003 case BUILT_IN_STRPBRK:
10004 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10006 case BUILT_IN_EXPECT:
10007 return fold_builtin_expect (loc, arg0, arg1);
10009 CASE_FLT_FN (BUILT_IN_POW):
10010 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10012 CASE_FLT_FN (BUILT_IN_POWI):
10013 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10015 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10016 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10018 CASE_FLT_FN (BUILT_IN_FMIN):
10019 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10021 CASE_FLT_FN (BUILT_IN_FMAX):
10022 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10024 case BUILT_IN_ISGREATER:
10025 return fold_builtin_unordered_cmp (loc, fndecl,
10026 arg0, arg1, UNLE_EXPR, LE_EXPR);
10027 case BUILT_IN_ISGREATEREQUAL:
10028 return fold_builtin_unordered_cmp (loc, fndecl,
10029 arg0, arg1, UNLT_EXPR, LT_EXPR);
10030 case BUILT_IN_ISLESS:
10031 return fold_builtin_unordered_cmp (loc, fndecl,
10032 arg0, arg1, UNGE_EXPR, GE_EXPR);
10033 case BUILT_IN_ISLESSEQUAL:
10034 return fold_builtin_unordered_cmp (loc, fndecl,
10035 arg0, arg1, UNGT_EXPR, GT_EXPR);
10036 case BUILT_IN_ISLESSGREATER:
10037 return fold_builtin_unordered_cmp (loc, fndecl,
10038 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10039 case BUILT_IN_ISUNORDERED:
10040 return fold_builtin_unordered_cmp (loc, fndecl,
10041 arg0, arg1, UNORDERED_EXPR,
10042 NOP_EXPR);
10044 /* We do the folding for va_start in the expander. */
10045 case BUILT_IN_VA_START:
10046 break;
10048 case BUILT_IN_SPRINTF:
10049 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10051 case BUILT_IN_OBJECT_SIZE:
10052 return fold_builtin_object_size (arg0, arg1);
10054 case BUILT_IN_PRINTF:
10055 case BUILT_IN_PRINTF_UNLOCKED:
10056 case BUILT_IN_VPRINTF:
10057 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10059 case BUILT_IN_PRINTF_CHK:
10060 case BUILT_IN_VPRINTF_CHK:
10061 if (!validate_arg (arg0, INTEGER_TYPE)
10062 || TREE_SIDE_EFFECTS (arg0))
10063 return NULL_TREE;
10064 else
10065 return fold_builtin_printf (loc, fndecl,
10066 arg1, NULL_TREE, ignore, fcode);
10067 break;
10069 case BUILT_IN_FPRINTF:
10070 case BUILT_IN_FPRINTF_UNLOCKED:
10071 case BUILT_IN_VFPRINTF:
10072 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10073 ignore, fcode);
10075 default:
10076 break;
10078 return NULL_TREE;
10081 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10082 and ARG2. IGNORE is true if the result of the function call is ignored.
10083 This function returns NULL_TREE if no simplification was possible. */
10085 static tree
10086 fold_builtin_3 (location_t loc, tree fndecl,
10087 tree arg0, tree arg1, tree arg2, bool ignore)
10089 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10090 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10091 switch (fcode)
10094 CASE_FLT_FN (BUILT_IN_SINCOS):
10095 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10097 CASE_FLT_FN (BUILT_IN_FMA):
10098 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10099 break;
10101 CASE_FLT_FN (BUILT_IN_REMQUO):
10102 if (validate_arg (arg0, REAL_TYPE)
10103 && validate_arg (arg1, REAL_TYPE)
10104 && validate_arg (arg2, POINTER_TYPE))
10105 return do_mpfr_remquo (arg0, arg1, arg2);
10106 break;
10108 case BUILT_IN_MEMSET:
10109 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10111 case BUILT_IN_BCOPY:
10112 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10113 void_type_node, true, /*endp=*/3);
10115 case BUILT_IN_MEMCPY:
10116 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10117 type, ignore, /*endp=*/0);
10119 case BUILT_IN_MEMPCPY:
10120 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10121 type, ignore, /*endp=*/1);
10123 case BUILT_IN_MEMMOVE:
10124 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10125 type, ignore, /*endp=*/3);
10127 case BUILT_IN_STRNCAT:
10128 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10130 case BUILT_IN_STRNCPY:
10131 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10133 case BUILT_IN_STRNCMP:
10134 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10136 case BUILT_IN_MEMCHR:
10137 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10139 case BUILT_IN_BCMP:
10140 case BUILT_IN_MEMCMP:
10141 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10143 case BUILT_IN_SPRINTF:
10144 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10146 case BUILT_IN_SNPRINTF:
10147 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
10149 case BUILT_IN_STRCPY_CHK:
10150 case BUILT_IN_STPCPY_CHK:
10151 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10152 ignore, fcode);
10154 case BUILT_IN_STRCAT_CHK:
10155 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10157 case BUILT_IN_PRINTF_CHK:
10158 case BUILT_IN_VPRINTF_CHK:
10159 if (!validate_arg (arg0, INTEGER_TYPE)
10160 || TREE_SIDE_EFFECTS (arg0))
10161 return NULL_TREE;
10162 else
10163 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10164 break;
10166 case BUILT_IN_FPRINTF:
10167 case BUILT_IN_FPRINTF_UNLOCKED:
10168 case BUILT_IN_VFPRINTF:
10169 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10170 ignore, fcode);
10172 case BUILT_IN_FPRINTF_CHK:
10173 case BUILT_IN_VFPRINTF_CHK:
10174 if (!validate_arg (arg1, INTEGER_TYPE)
10175 || TREE_SIDE_EFFECTS (arg1))
10176 return NULL_TREE;
10177 else
10178 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10179 ignore, fcode);
10181 default:
10182 break;
10184 return NULL_TREE;
10187 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10188 ARG2, and ARG3. IGNORE is true if the result of the function call is
10189 ignored. This function returns NULL_TREE if no simplification was
10190 possible. */
10192 static tree
10193 fold_builtin_4 (location_t loc, tree fndecl,
10194 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10196 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10198 switch (fcode)
10200 case BUILT_IN_MEMCPY_CHK:
10201 case BUILT_IN_MEMPCPY_CHK:
10202 case BUILT_IN_MEMMOVE_CHK:
10203 case BUILT_IN_MEMSET_CHK:
10204 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10205 NULL_TREE, ignore,
10206 DECL_FUNCTION_CODE (fndecl));
10208 case BUILT_IN_STRNCPY_CHK:
10209 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10211 case BUILT_IN_STRNCAT_CHK:
10212 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10214 case BUILT_IN_SNPRINTF:
10215 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
10217 case BUILT_IN_FPRINTF_CHK:
10218 case BUILT_IN_VFPRINTF_CHK:
10219 if (!validate_arg (arg1, INTEGER_TYPE)
10220 || TREE_SIDE_EFFECTS (arg1))
10221 return NULL_TREE;
10222 else
10223 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10224 ignore, fcode);
10225 break;
10227 default:
10228 break;
10230 return NULL_TREE;
10233 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10234 arguments, where NARGS <= 4. IGNORE is true if the result of the
10235 function call is ignored. This function returns NULL_TREE if no
10236 simplification was possible. Note that this only folds builtins with
10237 fixed argument patterns. Foldings that do varargs-to-varargs
10238 transformations, or that match calls with more than 4 arguments,
10239 need to be handled with fold_builtin_varargs instead. */
10241 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10243 static tree
10244 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10246 tree ret = NULL_TREE;
10248 switch (nargs)
10250 case 0:
10251 ret = fold_builtin_0 (loc, fndecl, ignore);
10252 break;
10253 case 1:
10254 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10255 break;
10256 case 2:
10257 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10258 break;
10259 case 3:
10260 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10261 break;
10262 case 4:
10263 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10264 ignore);
10265 break;
10266 default:
10267 break;
10269 if (ret)
10271 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10272 SET_EXPR_LOCATION (ret, loc);
10273 TREE_NO_WARNING (ret) = 1;
10274 return ret;
10276 return NULL_TREE;
10279 /* Builtins with folding operations that operate on "..." arguments
10280 need special handling; we need to store the arguments in a convenient
10281 data structure before attempting any folding. Fortunately there are
10282 only a few builtins that fall into this category. FNDECL is the
10283 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10284 result of the function call is ignored. */
10286 static tree
10287 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10288 bool ignore ATTRIBUTE_UNUSED)
10290 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10291 tree ret = NULL_TREE;
10293 switch (fcode)
10295 case BUILT_IN_SPRINTF_CHK:
10296 case BUILT_IN_VSPRINTF_CHK:
10297 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10298 break;
10300 case BUILT_IN_SNPRINTF_CHK:
10301 case BUILT_IN_VSNPRINTF_CHK:
10302 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10303 break;
10305 case BUILT_IN_FPCLASSIFY:
10306 ret = fold_builtin_fpclassify (loc, exp);
10307 break;
10309 default:
10310 break;
10312 if (ret)
10314 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10315 SET_EXPR_LOCATION (ret, loc);
10316 TREE_NO_WARNING (ret) = 1;
10317 return ret;
10319 return NULL_TREE;
10322 /* Return true if FNDECL shouldn't be folded right now.
10323 If a built-in function has an inline attribute always_inline
10324 wrapper, defer folding it after always_inline functions have
10325 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10326 might not be performed. */
10328 static bool
10329 avoid_folding_inline_builtin (tree fndecl)
10331 return (DECL_DECLARED_INLINE_P (fndecl)
10332 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10333 && cfun
10334 && !cfun->always_inline_functions_inlined
10335 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
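/* Editor's note, not part of the original builtins.c: the kind of wrapper
   this predicate guards against.  A fortified C library typically provides
   roughly

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy call before this wrapper has been inlined would bypass
   the object-size check, hence the deferral.  */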
10338 /* A wrapper function for builtin folding that prevents warnings for
10339 "statement without effect" and the like, caused by removing the
10340 call node earlier than the warning is generated. */
10342 tree
10343 fold_call_expr (location_t loc, tree exp, bool ignore)
10345 tree ret = NULL_TREE;
10346 tree fndecl = get_callee_fndecl (exp);
10347 if (fndecl
10348 && TREE_CODE (fndecl) == FUNCTION_DECL
10349 && DECL_BUILT_IN (fndecl)
10350 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10351 yet. Defer folding until we see all the arguments
10352 (after inlining). */
10353 && !CALL_EXPR_VA_ARG_PACK (exp))
10355 int nargs = call_expr_nargs (exp);
10357 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10358 instead last argument is __builtin_va_arg_pack (). Defer folding
10359 even in that case, until arguments are finalized. */
10360 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10362 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10363 if (fndecl2
10364 && TREE_CODE (fndecl2) == FUNCTION_DECL
10365 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10366 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10367 return NULL_TREE;
10370 if (avoid_folding_inline_builtin (fndecl))
10371 return NULL_TREE;
10373 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10374 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10375 CALL_EXPR_ARGP (exp), ignore);
10376 else
10378 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10380 tree *args = CALL_EXPR_ARGP (exp);
10381 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10383 if (!ret)
10384 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
10385 if (ret)
10386 return ret;
10389 return NULL_TREE;
10392 /* Conveniently construct a function call expression. FNDECL names the
10393 function to be called and N arguments are passed in the array
10394 ARGARRAY. */
10396 tree
10397 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10399 tree fntype = TREE_TYPE (fndecl);
10400 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10402 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10405 /* Conveniently construct a function call expression. FNDECL names the
10406 function to be called and the arguments are passed in the vector
10407 VEC. */
10409 tree
10410 build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
10412 return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
10413 VEC_address (tree, vec));
10417 /* Conveniently construct a function call expression. FNDECL names the
10418 function to be called, N is the number of arguments, and the "..."
10419 parameters are the argument expressions. */
10421 tree
10422 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10424 va_list ap;
10425 tree *argarray = XALLOCAVEC (tree, n);
10426 int i;
10428 va_start (ap, n);
10429 for (i = 0; i < n; i++)
10430 argarray[i] = va_arg (ap, tree);
10431 va_end (ap);
10432 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10435 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10436 varargs macros aren't supported by all bootstrap compilers. */
10438 tree
10439 build_call_expr (tree fndecl, int n, ...)
10441 va_list ap;
10442 tree *argarray = XALLOCAVEC (tree, n);
10443 int i;
10445 va_start (ap, n);
10446 for (i = 0; i < n; i++)
10447 argarray[i] = va_arg (ap, tree);
10448 va_end (ap);
10449 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
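/* Editor's note, not part of the original builtins.c: typical use of these
   constructors elsewhere in this file, e.g. when folding strstr into
   strchr further below:

     fn = implicit_built_in_decls[BUILT_IN_STRCHR];
     return build_call_expr_loc (loc, fn, 2, s1,
				 build_int_cst (integer_type_node, p2[0]));

   The "..." arguments are gathered into an array and handed on to
   fold_builtin_call_array, so the newly built call is itself folded when
   possible.  */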
10452 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10453 N arguments are passed in the array ARGARRAY. */
10455 tree
10456 fold_builtin_call_array (location_t loc, tree type,
10457 tree fn,
10458 int n,
10459 tree *argarray)
10461 tree ret = NULL_TREE;
10462 tree exp;
10464 if (TREE_CODE (fn) == ADDR_EXPR)
10466 tree fndecl = TREE_OPERAND (fn, 0);
10467 if (TREE_CODE (fndecl) == FUNCTION_DECL
10468 && DECL_BUILT_IN (fndecl))
10470 /* If last argument is __builtin_va_arg_pack (), arguments to this
10471 function are not finalized yet. Defer folding until they are. */
10472 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10474 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10475 if (fndecl2
10476 && TREE_CODE (fndecl2) == FUNCTION_DECL
10477 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10478 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10479 return build_call_array_loc (loc, type, fn, n, argarray);
10481 if (avoid_folding_inline_builtin (fndecl))
10482 return build_call_array_loc (loc, type, fn, n, argarray);
10483 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10485 ret = targetm.fold_builtin (fndecl, n, argarray, false);
10486 if (ret)
10487 return ret;
10489 return build_call_array_loc (loc, type, fn, n, argarray);
10491 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10493 /* First try the transformations that don't require consing up
10494 an exp. */
10495 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
10496 if (ret)
10497 return ret;
10500 /* If we got this far, we need to build an exp. */
10501 exp = build_call_array_loc (loc, type, fn, n, argarray);
10502 ret = fold_builtin_varargs (loc, fndecl, exp, false);
10503 return ret ? ret : exp;
10507 return build_call_array_loc (loc, type, fn, n, argarray);
10510 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10511 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10512 of arguments in ARGS to be omitted. OLDNARGS is the number of
10513 elements in ARGS. */
10515 static tree
10516 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10517 int skip, tree fndecl, int n, va_list newargs)
10519 int nargs = oldnargs - skip + n;
10520 tree *buffer;
10522 if (n > 0)
10524 int i, j;
10526 buffer = XALLOCAVEC (tree, nargs);
10527 for (i = 0; i < n; i++)
10528 buffer[i] = va_arg (newargs, tree);
10529 for (j = skip; j < oldnargs; j++, i++)
10530 buffer[i] = args[j];
10532 else
10533 buffer = args + skip;
10535 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10538 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10539 list ARGS along with N new arguments specified as the "..."
10540 parameters. SKIP is the number of arguments in ARGS to be omitted.
10541 OLDNARGS is the number of elements in ARGS. */
10543 static tree
10544 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
10545 int skip, tree fndecl, int n, ...)
10547 va_list ap;
10548 tree t;
10550 va_start (ap, n);
10551 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
10552 va_end (ap);
10554 return t;
10557 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10558 along with N new arguments specified as the "..." parameters. SKIP
10559 is the number of arguments in EXP to be omitted. This function is used
10560 to do varargs-to-varargs transformations. */
10562 static tree
10563 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10565 va_list ap;
10566 tree t;
10568 va_start (ap, n);
10569 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10570 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10571 va_end (ap);
10573 return t;
10576 /* Validate a single argument ARG against a tree code CODE representing
10577 a type. */
10579 static bool
10580 validate_arg (const_tree arg, enum tree_code code)
10582 if (!arg)
10583 return false;
10584 else if (code == POINTER_TYPE)
10585 return POINTER_TYPE_P (TREE_TYPE (arg));
10586 else if (code == INTEGER_TYPE)
10587 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10588 return code == TREE_CODE (TREE_TYPE (arg));
10591 /* This function validates the types of a function call argument list
10592 against a specified list of tree_codes. If the last specifier is a 0,
10593 that represents an ellipsis, otherwise the last specifier must be a
10594 VOID_TYPE.
10596 This is the GIMPLE version of validate_arglist. Eventually we want to
10597 completely convert builtins.c to work from GIMPLEs and the tree based
10598 validate_arglist will then be removed. */
10600 bool
10601 validate_gimple_arglist (const_gimple call, ...)
10603 enum tree_code code;
10604 bool res = 0;
10605 va_list ap;
10606 const_tree arg;
10607 size_t i;
10609 va_start (ap, call);
10610 i = 0;
10614 code = (enum tree_code) va_arg (ap, int);
10615 switch (code)
10617 case 0:
10618 /* This signifies an ellipsis; any further arguments are all ok. */
10619 res = true;
10620 goto end;
10621 case VOID_TYPE:
10622 /* This signifies an endlink; if no arguments remain, return
10623 true, otherwise return false. */
10624 res = (i == gimple_call_num_args (call));
10625 goto end;
10626 default:
10627 /* If no parameters remain or the parameter's code does not
10628 match the specified code, return false. Otherwise continue
10629 checking any remaining arguments. */
10630 arg = gimple_call_arg (call, i++);
10631 if (!validate_arg (arg, code))
10632 goto end;
10633 break;
10636 while (1);
10638 /* We need gotos here since we can only have one VA_CLOSE in a
10639 function. */
10640 end: ;
10641 va_end (ap);
10643 return res;
10646 /* This function validates the types of a function call argument list
10647 against a specified list of tree_codes. If the last specifier is a 0,
10648 that represents an ellipsis, otherwise the last specifier must be a
10649 VOID_TYPE. */
10651 bool
10652 validate_arglist (const_tree callexpr, ...)
10654 enum tree_code code;
10655 bool res = 0;
10656 va_list ap;
10657 const_call_expr_arg_iterator iter;
10658 const_tree arg;
10660 va_start (ap, callexpr);
10661 init_const_call_expr_arg_iterator (callexpr, &iter);
10665 code = (enum tree_code) va_arg (ap, int);
10666 switch (code)
10668 case 0:
10669 /* This signifies an ellipsis; any further arguments are all ok. */
10670 res = true;
10671 goto end;
10672 case VOID_TYPE:
10673 /* This signifies an endlink; if no arguments remain, return
10674 true, otherwise return false. */
10675 res = !more_const_call_expr_args_p (&iter);
10676 goto end;
10677 default:
10678 /* If no parameters remain or the parameter's code does not
10679 match the specified code, return false. Otherwise continue
10680 checking any remaining arguments. */
10681 arg = next_const_call_expr_arg (&iter);
10682 if (!validate_arg (arg, code))
10683 goto end;
10684 break;
10687 while (1);
10689 /* We need gotos here since we can only have one VA_CLOSE in a
10690 function. */
10691 end: ;
10692 va_end (ap);
10694 return res;
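/* Editor's note, not part of the original builtins.c: how the terminator
   convention is used by callers in this file, e.g. for fpclassify above:

     validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
		       INTEGER_TYPE, INTEGER_TYPE, REAL_TYPE, VOID_TYPE)

   accepts exactly five integer arguments followed by one real argument,
   while ending the list with 0 instead of VOID_TYPE would allow any
   number of additional arguments after those checked.  */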
10697 /* Default target-specific builtin expander that does nothing. */
10699 rtx
10700 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10701 rtx target ATTRIBUTE_UNUSED,
10702 rtx subtarget ATTRIBUTE_UNUSED,
10703 enum machine_mode mode ATTRIBUTE_UNUSED,
10704 int ignore ATTRIBUTE_UNUSED)
10706 return NULL_RTX;
10709 /* Returns true if EXP represents data that would potentially reside
10710 in a readonly section. */
10712 static bool
10713 readonly_data_expr (tree exp)
10715 STRIP_NOPS (exp);
10717 if (TREE_CODE (exp) != ADDR_EXPR)
10718 return false;
10720 exp = get_base_address (TREE_OPERAND (exp, 0));
10721 if (!exp)
10722 return false;
10724 /* Make sure we call decl_readonly_section only for trees it
10725 can handle (since it returns true for everything it doesn't
10726 understand). */
10727 if (TREE_CODE (exp) == STRING_CST
10728 || TREE_CODE (exp) == CONSTRUCTOR
10729 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10730 return decl_readonly_section (exp, 0);
10731 else
10732 return false;
10735 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10736 to the call, and TYPE is its return type.
10738 Return NULL_TREE if no simplification was possible, otherwise return the
10739 simplified form of the call as a tree.
10741 The simplified form may be a constant or other expression which
10742 computes the same value, but in a more efficient manner (including
10743 calls to other builtin functions).
10745 The call may contain arguments which need to be evaluated, but
10746 which are not useful to determine the result of the call. In
10747 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10748 COMPOUND_EXPR will be an argument which must be evaluated.
10749 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10750 COMPOUND_EXPR in the chain will contain the tree for the simplified
10751 form of the builtin function call. */
10753 static tree
10754 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10756 if (!validate_arg (s1, POINTER_TYPE)
10757 || !validate_arg (s2, POINTER_TYPE))
10758 return NULL_TREE;
10759 else
10761 tree fn;
10762 const char *p1, *p2;
10764 p2 = c_getstr (s2);
10765 if (p2 == NULL)
10766 return NULL_TREE;
10768 p1 = c_getstr (s1);
10769 if (p1 != NULL)
10771 const char *r = strstr (p1, p2);
10772 tree tem;
10774 if (r == NULL)
10775 return build_int_cst (TREE_TYPE (s1), 0);
10777 /* Return an offset into the constant string argument. */
10778 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10779 s1, size_int (r - p1));
10780 return fold_convert_loc (loc, type, tem);
10783 /* The argument is const char *, and the result is char *, so we need
10784 a type conversion here to avoid a warning. */
10785 if (p2[0] == '\0')
10786 return fold_convert_loc (loc, type, s1);
10788 if (p2[1] != '\0')
10789 return NULL_TREE;
10791 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10792 if (!fn)
10793 return NULL_TREE;
10795 /* New argument list transforming strstr(s1, s2) to
10796 strchr(s1, s2[0]). */
10797 return build_call_expr_loc (loc, fn, 2, s1,
10798 build_int_cst (integer_type_node, p2[0]));
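/* Editor's note, not part of the original builtins.c: effects of the
   strstr folding above, where p is any char * expression:

     strstr (p, "")          ->  (char *) p
     strstr ("abcde", "cd")  ->  (char *) "abcde" + 2
     strstr (p, "c")         ->  strchr (p, 'c')

   Anything else is left for the real library call.  */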
10802 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10803 the call, and TYPE is its return type.
10805 Return NULL_TREE if no simplification was possible, otherwise return the
10806 simplified form of the call as a tree.
10808 The simplified form may be a constant or other expression which
10809 computes the same value, but in a more efficient manner (including
10810 calls to other builtin functions).
10812 The call may contain arguments which need to be evaluated, but
10813 which are not useful to determine the result of the call. In
10814 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10815 COMPOUND_EXPR will be an argument which must be evaluated.
10816 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10817 COMPOUND_EXPR in the chain will contain the tree for the simplified
10818 form of the builtin function call. */
10820 static tree
10821 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
10823 if (!validate_arg (s1, POINTER_TYPE)
10824 || !validate_arg (s2, INTEGER_TYPE))
10825 return NULL_TREE;
10826 else
10828 const char *p1;
10830 if (TREE_CODE (s2) != INTEGER_CST)
10831 return NULL_TREE;
10833 p1 = c_getstr (s1);
10834 if (p1 != NULL)
10836 char c;
10837 const char *r;
10838 tree tem;
10840 if (target_char_cast (s2, &c))
10841 return NULL_TREE;
10843 r = strchr (p1, c);
10845 if (r == NULL)
10846 return build_int_cst (TREE_TYPE (s1), 0);
10848 /* Return an offset into the constant string argument. */
10849 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10850 s1, size_int (r - p1));
10851 return fold_convert_loc (loc, type, tem);
10853 return NULL_TREE;
10857 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10858 the call, and TYPE is its return type.
10860 Return NULL_TREE if no simplification was possible, otherwise return the
10861 simplified form of the call as a tree.
10863 The simplified form may be a constant or other expression which
10864 computes the same value, but in a more efficient manner (including
10865 calls to other builtin functions).
10867 The call may contain arguments which need to be evaluated, but
10868 which are not useful to determine the result of the call. In
10869 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10870 COMPOUND_EXPR will be an argument which must be evaluated.
10871 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10872 COMPOUND_EXPR in the chain will contain the tree for the simplified
10873 form of the builtin function call. */
10875 static tree
10876 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
10878 if (!validate_arg (s1, POINTER_TYPE)
10879 || !validate_arg (s2, INTEGER_TYPE))
10880 return NULL_TREE;
10881 else
10883 tree fn;
10884 const char *p1;
10886 if (TREE_CODE (s2) != INTEGER_CST)
10887 return NULL_TREE;
10889 p1 = c_getstr (s1);
10890 if (p1 != NULL)
10892 char c;
10893 const char *r;
10894 tree tem;
10896 if (target_char_cast (s2, &c))
10897 return NULL_TREE;
10899 r = strrchr (p1, c);
10901 if (r == NULL)
10902 return build_int_cst (TREE_TYPE (s1), 0);
10904 /* Return an offset into the constant string argument. */
10905 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10906 s1, size_int (r - p1));
10907 return fold_convert_loc (loc, type, tem);
10910 if (! integer_zerop (s2))
10911 return NULL_TREE;
10913 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10914 if (!fn)
10915 return NULL_TREE;
10917 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10918 return build_call_expr_loc (loc, fn, 2, s1, s2);
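/* Editor's note, not part of the original builtins.c: effects of the
   strrchr folding above.  With a constant string the result is computed
   outright, e.g. strrchr ("abcb", 'b') becomes "abcb" + 3; otherwise only
   strrchr (s1, '\0') is rewritten, to strchr (s1, '\0'), since both then
   point at the terminating NUL.  */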
10922 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10923 to the call, and TYPE is its return type.
10925 Return NULL_TREE if no simplification was possible, otherwise return the
10926 simplified form of the call as a tree.
10928 The simplified form may be a constant or other expression which
10929 computes the same value, but in a more efficient manner (including
10930 calls to other builtin functions).
10932 The call may contain arguments which need to be evaluated, but
10933 which are not useful to determine the result of the call. In
10934 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10935 COMPOUND_EXPR will be an argument which must be evaluated.
10936 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10937 COMPOUND_EXPR in the chain will contain the tree for the simplified
10938 form of the builtin function call. */
10940 static tree
10941 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10943 if (!validate_arg (s1, POINTER_TYPE)
10944 || !validate_arg (s2, POINTER_TYPE))
10945 return NULL_TREE;
10946 else
10948 tree fn;
10949 const char *p1, *p2;
10951 p2 = c_getstr (s2);
10952 if (p2 == NULL)
10953 return NULL_TREE;
10955 p1 = c_getstr (s1);
10956 if (p1 != NULL)
10958 const char *r = strpbrk (p1, p2);
10959 tree tem;
10961 if (r == NULL)
10962 return build_int_cst (TREE_TYPE (s1), 0);
10964 /* Return an offset into the constant string argument. */
10965 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10966 s1, size_int (r - p1));
10967 return fold_convert_loc (loc, type, tem);
10970 if (p2[0] == '\0')
10971 /* strpbrk(x, "") == NULL.
10972 Evaluate and ignore s1 in case it had side-effects. */
10973 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
10975 if (p2[1] != '\0')
10976 return NULL_TREE; /* Really call strpbrk. */
10978 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10979 if (!fn)
10980 return NULL_TREE;
10982 /* New argument list transforming strpbrk(s1, s2) to
10983 strchr(s1, s2[0]). */
10984 return build_call_expr_loc (loc, fn, 2, s1,
10985 build_int_cst (integer_type_node, p2[0]));
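/* Illustrative example (not part of the original sources) of the strpbrk
   rewrites performed above; example_strpbrk and s are made-up names:

     #include <string.h>

     char *
     example_strpbrk (char *s)
     {
       char *a = strpbrk ("abc", "cb");  // becomes "abc" + 1 at compile time
       char *b = strpbrk (s, "");        // becomes NULL; s is still evaluated
       char *c = strpbrk (s, "x");       // becomes strchr (s, 'x')
       return a ? a : b ? b : c;
     }  */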
10989 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10990 to the call.
10992 Return NULL_TREE if no simplification was possible, otherwise return the
10993 simplified form of the call as a tree.
10995 The simplified form may be a constant or other expression which
10996 computes the same value, but in a more efficient manner (including
10997 calls to other builtin functions).
10999 The call may contain arguments which need to be evaluated, but
11000 which are not useful to determine the result of the call. In
11001 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11002 COMPOUND_EXPR will be an argument which must be evaluated.
11003 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11004 COMPOUND_EXPR in the chain will contain the tree for the simplified
11005 form of the builtin function call. */
11007 static tree
11008 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
11010 if (!validate_arg (dst, POINTER_TYPE)
11011 || !validate_arg (src, POINTER_TYPE))
11012 return NULL_TREE;
11013 else
11015 const char *p = c_getstr (src);
11017 /* If the string length is zero, return the dst parameter. */
11018 if (p && *p == '\0')
11019 return dst;
11021 if (optimize_insn_for_speed_p ())
11023 /* See if we can store by pieces into (dst + strlen(dst)). */
11024 tree newdst, call;
11025 tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11026 tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11028 if (!strlen_fn || !strcpy_fn)
11029 return NULL_TREE;
11031 /* If the target does not have a movstr pattern, only emit a strcpy
11032 call when the length of the source string is computable; in that
11033 case the strcpy can later be expanded via memcpy, probably as a
11034 sequence of mov instructions.  If we have movstr instructions we
11035 can emit strcpy calls unconditionally. */
11036 if (!HAVE_movstr)
11038 tree len = c_strlen (src, 1);
11039 if (! len || TREE_SIDE_EFFECTS (len))
11040 return NULL_TREE;
11043 /* Stabilize the argument list. */
11044 dst = builtin_save_expr (dst);
11046 /* Create strlen (dst). */
11047 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11048 /* Create (dst p+ strlen (dst)). */
11050 newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
11051 TREE_TYPE (dst), dst, newdst);
11052 newdst = builtin_save_expr (newdst);
11054 call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
11055 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11057 return NULL_TREE;
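/* Illustrative example (not part of the original sources): with a constant
   source string the strcat fold above corresponds to the rewrite sketched
   below; example_strcat and buf are made-up names:

     #include <string.h>

     char *
     example_strcat (char *buf)
     {
       // strcat (buf, "") is replaced by buf itself.
       // When optimizing for speed and the source length is known,
       // strcat (buf, "abc") becomes
       //   (strcpy (buf + strlen (buf), "abc"), buf)
       return strcat (buf, "abc");
     }  */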
11061 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11062 arguments to the call.
11064 Return NULL_TREE if no simplification was possible, otherwise return the
11065 simplified form of the call as a tree.
11067 The simplified form may be a constant or other expression which
11068 computes the same value, but in a more efficient manner (including
11069 calls to other builtin functions).
11071 The call may contain arguments which need to be evaluated, but
11072 which are not useful to determine the result of the call. In
11073 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11074 COMPOUND_EXPR will be an argument which must be evaluated.
11075 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11076 COMPOUND_EXPR in the chain will contain the tree for the simplified
11077 form of the builtin function call. */
11079 static tree
11080 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11082 if (!validate_arg (dst, POINTER_TYPE)
11083 || !validate_arg (src, POINTER_TYPE)
11084 || !validate_arg (len, INTEGER_TYPE))
11085 return NULL_TREE;
11086 else
11088 const char *p = c_getstr (src);
11090 /* If the requested length is zero, or the src parameter string
11091 length is zero, return the dst parameter. */
11092 if (integer_zerop (len) || (p && *p == '\0'))
11093 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11095 /* If the requested len is greater than or equal to the string
11096 length, call strcat. */
11097 if (TREE_CODE (len) == INTEGER_CST && p
11098 && compare_tree_int (len, strlen (p)) >= 0)
11100 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11102 /* If the replacement _DECL isn't initialized, don't do the
11103 transformation. */
11104 if (!fn)
11105 return NULL_TREE;
11107 return build_call_expr_loc (loc, fn, 2, dst, src);
11109 return NULL_TREE;
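/* Illustrative example (not part of the original sources) of the strncat
   rewrites performed above; example_strncat, buf and src are made-up names:

     #include <string.h>

     char *
     example_strncat (char *buf, const char *src)
     {
       strncat (buf, src, 0);          // becomes buf; src is still evaluated
       return strncat (buf, "ab", 5);  // becomes strcat (buf, "ab"),
                                       // because 5 >= strlen ("ab")
     }  */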
11113 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11114 to the call.
11116 Return NULL_TREE if no simplification was possible, otherwise return the
11117 simplified form of the call as a tree.
11119 The simplified form may be a constant or other expression which
11120 computes the same value, but in a more efficient manner (including
11121 calls to other builtin functions).
11123 The call may contain arguments which need to be evaluated, but
11124 which are not useful to determine the result of the call. In
11125 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11126 COMPOUND_EXPR will be an argument which must be evaluated.
11127 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11128 COMPOUND_EXPR in the chain will contain the tree for the simplified
11129 form of the builtin function call. */
11131 static tree
11132 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11134 if (!validate_arg (s1, POINTER_TYPE)
11135 || !validate_arg (s2, POINTER_TYPE))
11136 return NULL_TREE;
11137 else
11139 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11141 /* If both arguments are constants, evaluate at compile-time. */
11142 if (p1 && p2)
11144 const size_t r = strspn (p1, p2);
11145 return size_int (r);
11148 /* If either argument is "", the result is 0. */
11149 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11150 /* Evaluate and ignore both arguments in case either one has
11151 side-effects. */
11152 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11153 s1, s2);
11154 return NULL_TREE;
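/* Illustrative example (not part of the original sources) of the strspn
   folds above; example_strspn and s are made-up names:

     #include <string.h>

     size_t
     example_strspn (const char *s)
     {
       size_t a = strspn ("abcde", "abc");  // folded to the constant 3
       size_t b = strspn (s, "");           // folded to 0; s is still evaluated
       return a + b;
     }  */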
11158 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11159 to the call.
11161 Return NULL_TREE if no simplification was possible, otherwise return the
11162 simplified form of the call as a tree.
11164 The simplified form may be a constant or other expression which
11165 computes the same value, but in a more efficient manner (including
11166 calls to other builtin functions).
11168 The call may contain arguments which need to be evaluated, but
11169 which are not useful to determine the result of the call. In
11170 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11171 COMPOUND_EXPR will be an argument which must be evaluated.
11172 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11173 COMPOUND_EXPR in the chain will contain the tree for the simplified
11174 form of the builtin function call. */
11176 static tree
11177 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11179 if (!validate_arg (s1, POINTER_TYPE)
11180 || !validate_arg (s2, POINTER_TYPE))
11181 return NULL_TREE;
11182 else
11184 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11186 /* If both arguments are constants, evaluate at compile-time. */
11187 if (p1 && p2)
11189 const size_t r = strcspn (p1, p2);
11190 return size_int (r);
11193 /* If the first argument is "", the result is 0. */
11194 if (p1 && *p1 == '\0')
11196 /* Evaluate and ignore argument s2 in case it has
11197 side-effects. */
11198 return omit_one_operand_loc (loc, size_type_node,
11199 size_zero_node, s2);
11202 /* If the second argument is "", return __builtin_strlen(s1). */
11203 if (p2 && *p2 == '\0')
11205 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11207 /* If the replacement _DECL isn't initialized, don't do the
11208 transformation. */
11209 if (!fn)
11210 return NULL_TREE;
11212 return build_call_expr_loc (loc, fn, 1, s1);
11214 return NULL_TREE;
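/* Illustrative example (not part of the original sources) of the strcspn
   folds above; example_strcspn and s are made-up names:

     #include <string.h>

     size_t
     example_strcspn (const char *s)
     {
       size_t a = strcspn ("abcde", "dx");  // folded to the constant 3
       size_t b = strcspn (s, "");          // folded to strlen (s)
       size_t c = strcspn ("", s);          // folded to 0; s is still evaluated
       return a + b + c;
     }  */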
11218 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11219 to the call. IGNORE is true if the value returned
11220 by the builtin will be ignored.  UNLOCKED is true if this is
11221 actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
11222 the known length of the string. Return NULL_TREE if no simplification
11223 was possible. */
11225 tree
11226 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11227 bool ignore, bool unlocked, tree len)
11229 /* If we're using an unlocked function, assume the other unlocked
11230 functions exist explicitly. */
11231 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11232 : implicit_built_in_decls[BUILT_IN_FPUTC];
11233 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11234 : implicit_built_in_decls[BUILT_IN_FWRITE];
11236 /* If the return value is used, don't do the transformation. */
11237 if (!ignore)
11238 return NULL_TREE;
11240 /* Verify the arguments in the original call. */
11241 if (!validate_arg (arg0, POINTER_TYPE)
11242 || !validate_arg (arg1, POINTER_TYPE))
11243 return NULL_TREE;
11245 if (! len)
11246 len = c_strlen (arg0, 0);
11248 /* Get the length of the string passed to fputs. If the length
11249 can't be determined, punt. */
11250 if (!len
11251 || TREE_CODE (len) != INTEGER_CST)
11252 return NULL_TREE;
11254 switch (compare_tree_int (len, 1))
11256 case -1: /* length is 0, delete the call entirely.  */
11257 return omit_one_operand_loc (loc, integer_type_node,
11258 integer_zero_node, arg1);
11260 case 0: /* length is 1, call fputc. */
11262 const char *p = c_getstr (arg0);
11264 if (p != NULL)
11266 if (fn_fputc)
11267 return build_call_expr_loc (loc, fn_fputc, 2,
11268 build_int_cst
11269 (integer_type_node, p[0]), arg1);
11270 else
11271 return NULL_TREE;
11274 /* FALLTHROUGH */
11275 case 1: /* length is greater than 1, call fwrite. */
11277 /* If optimizing for size keep fputs. */
11278 if (optimize_function_for_size_p (cfun))
11279 return NULL_TREE;
11280 /* New argument list transforming fputs(string, stream) to
11281 fwrite(string, 1, len, stream). */
11282 if (fn_fwrite)
11283 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11284 size_one_node, len, arg1);
11285 else
11286 return NULL_TREE;
11288 default:
11289 gcc_unreachable ();
11291 return NULL_TREE;
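/* Illustrative example (not part of the original sources): when the string
   length is known and the return value is unused, the fputs fold above
   corresponds to rewrites such as these; example_fputs is a made-up name:

     #include <stdio.h>

     void
     example_fputs (FILE *fp)
     {
       fputs ("", fp);       // removed; fp is still evaluated
       fputs ("x", fp);      // becomes fputc ('x', fp)
       fputs ("hello", fp);  // becomes fwrite ("hello", 1, 5, fp),
                             // unless optimizing for size
     }  */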
11294 /* Fold the next_arg or va_start call EXP.  Return true if an error
11295 was produced, false otherwise.  This is done so that we don't output
11296 the error or warning twice or three times. */
11298 bool
11299 fold_builtin_next_arg (tree exp, bool va_start_p)
11301 tree fntype = TREE_TYPE (current_function_decl);
11302 int nargs = call_expr_nargs (exp);
11303 tree arg;
11305 if (!stdarg_p (fntype))
11307 error ("%<va_start%> used in function with fixed args");
11308 return true;
11311 if (va_start_p)
11313 if (va_start_p && (nargs != 2))
11315 error ("wrong number of arguments to function %<va_start%>");
11316 return true;
11318 arg = CALL_EXPR_ARG (exp, 1);
11320 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11321 when we checked the arguments and if needed issued a warning. */
11322 else
11324 if (nargs == 0)
11326 /* Evidently an out of date version of <stdarg.h>; can't validate
11327 va_start's second argument, but can still work as intended. */
11328 warning (0, "%<__builtin_next_arg%> called without an argument");
11329 return true;
11331 else if (nargs > 1)
11333 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11334 return true;
11336 arg = CALL_EXPR_ARG (exp, 0);
11339 if (TREE_CODE (arg) == SSA_NAME)
11340 arg = SSA_NAME_VAR (arg);
11342 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11343 or __builtin_next_arg (0) the first time we see it, after checking
11344 the arguments and if needed issuing a warning. */
11345 if (!integer_zerop (arg))
11347 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11349 /* Strip off all nops for the sake of the comparison. This
11350 is not quite the same as STRIP_NOPS. It does more.
11351 We must also strip off INDIRECT_EXPR for C++ reference
11352 parameters. */
11353 while (CONVERT_EXPR_P (arg)
11354 || TREE_CODE (arg) == INDIRECT_REF)
11355 arg = TREE_OPERAND (arg, 0);
11356 if (arg != last_parm)
11358 /* FIXME: Sometimes with the tree optimizers we can end up with
11359 something that is not the last argument even though the user
11360 used the last argument.  We just warn and set the arg to be the
11361 last argument so that we will not get wrong-code because of
11362 it. */
11363 warning (0, "second parameter of %<va_start%> not last named argument");
11366 /* Undefined by C99 7.15.1.4p4 (va_start):
11367 "If the parameter parmN is declared with the register storage
11368 class, with a function or array type, or with a type that is
11369 not compatible with the type that results after application of
11370 the default argument promotions, the behavior is undefined."  */
11372 else if (DECL_REGISTER (arg))
11373 warning (0, "undefined behaviour when second parameter of "
11374 "%<va_start%> is declared with %<register%> storage");
11376 /* We want to verify the second parameter just once before the tree
11377 optimizers are run and then avoid keeping it in the tree,
11378 as otherwise we could warn even for correct code like:
11379 void foo (int i, ...)
11380 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11381 if (va_start_p)
11382 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11383 else
11384 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11386 return false;
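/* Illustrative example (not part of the original sources): the checks above
   accept the first va_start below and warn about the second; example_va and
   the parameter names are made up:

     #include <stdarg.h>

     void
     example_va (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, b);   // OK: b is the last named parameter
       va_end (ap);
       va_start (ap, a);   // warned: second parameter of va_start is not
       va_end (ap);        // the last named argument
     }  */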
11390 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11391 ORIG may be null if this is a 2-argument call. We don't attempt to
11392 simplify calls with more than 3 arguments.
11394 Return NULL_TREE if no simplification was possible, otherwise return the
11395 simplified form of the call as a tree. If IGNORED is true, it means that
11396 the caller does not use the returned value of the function. */
11398 static tree
11399 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11400 tree orig, int ignored)
11402 tree call, retval;
11403 const char *fmt_str = NULL;
11405 /* Verify the required arguments in the original call. We deal with two
11406 types of sprintf() calls: 'sprintf (str, fmt)' and
11407 'sprintf (dest, "%s", orig)'. */
11408 if (!validate_arg (dest, POINTER_TYPE)
11409 || !validate_arg (fmt, POINTER_TYPE))
11410 return NULL_TREE;
11411 if (orig && !validate_arg (orig, POINTER_TYPE))
11412 return NULL_TREE;
11414 /* Check whether the format is a literal string constant. */
11415 fmt_str = c_getstr (fmt);
11416 if (fmt_str == NULL)
11417 return NULL_TREE;
11419 call = NULL_TREE;
11420 retval = NULL_TREE;
11422 if (!init_target_chars ())
11423 return NULL_TREE;
11425 /* If the format doesn't contain % args or %%, use strcpy. */
11426 if (strchr (fmt_str, target_percent) == NULL)
11428 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11430 if (!fn)
11431 return NULL_TREE;
11433 /* Don't optimize sprintf (buf, "abc", ptr++). */
11434 if (orig)
11435 return NULL_TREE;
11437 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11438 'format' is known to contain no % formats. */
11439 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11440 if (!ignored)
11441 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11444 /* If the format is "%s", use strcpy if the result isn't used. */
11445 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11447 tree fn;
11448 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11450 if (!fn)
11451 return NULL_TREE;
11453 /* Don't crash on sprintf (str1, "%s"). */
11454 if (!orig)
11455 return NULL_TREE;
11457 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11458 if (!ignored)
11460 retval = c_strlen (orig, 1);
11461 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11462 return NULL_TREE;
11464 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11467 if (call && retval)
11469 retval = fold_convert_loc
11470 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11471 retval);
11472 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11474 else
11475 return call;
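/* Illustrative example (not part of the original sources) of the sprintf
   rewrites performed above; example_sprintf, buf and src are made-up names:

     #include <stdio.h>
     #include <string.h>

     int
     example_sprintf (char *buf, const char *src)
     {
       sprintf (buf, "hello");         // becomes strcpy (buf, "hello")
       sprintf (buf, "%s", src);       // becomes strcpy (buf, src)
       return sprintf (buf, "hello");  // becomes (strcpy (buf, "hello"), 5)
     }  */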
11478 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
11479 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
11480 attempt to simplify calls with more than 4 arguments.
11482 Return NULL_TREE if no simplification was possible, otherwise return the
11483 simplified form of the call as a tree. If IGNORED is true, it means that
11484 the caller does not use the returned value of the function. */
11486 static tree
11487 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
11488 tree orig, int ignored)
11490 tree call, retval;
11491 const char *fmt_str = NULL;
11492 unsigned HOST_WIDE_INT destlen;
11494 /* Verify the required arguments in the original call. We deal with two
11495 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
11496 'snprintf (dest, cst, "%s", orig)'. */
11497 if (!validate_arg (dest, POINTER_TYPE)
11498 || !validate_arg (destsize, INTEGER_TYPE)
11499 || !validate_arg (fmt, POINTER_TYPE))
11500 return NULL_TREE;
11501 if (orig && !validate_arg (orig, POINTER_TYPE))
11502 return NULL_TREE;
11504 if (!host_integerp (destsize, 1))
11505 return NULL_TREE;
11507 /* Check whether the format is a literal string constant. */
11508 fmt_str = c_getstr (fmt);
11509 if (fmt_str == NULL)
11510 return NULL_TREE;
11512 call = NULL_TREE;
11513 retval = NULL_TREE;
11515 if (!init_target_chars ())
11516 return NULL_TREE;
11518 destlen = tree_low_cst (destsize, 1);
11520 /* If the format doesn't contain % args or %%, use strcpy. */
11521 if (strchr (fmt_str, target_percent) == NULL)
11523 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11524 size_t len = strlen (fmt_str);
11526 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
11527 if (orig)
11528 return NULL_TREE;
11530 /* We could expand this as
11531 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
11532 or to
11533 memcpy (str, fmt_with_nul_at_cstm1, cst);
11534 but in the former case that might increase code size
11535 and in the latter case grow .rodata section too much.
11536 So punt for now. */
11537 if (len >= destlen)
11538 return NULL_TREE;
11540 if (!fn)
11541 return NULL_TREE;
11543 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
11544 'format' is known to contain no % formats and
11545 strlen (fmt) < cst. */
11546 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11548 if (!ignored)
11549 retval = build_int_cst (integer_type_node, strlen (fmt_str));
11552 /* If the format is "%s", use strcpy if the result isn't used. */
11553 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11555 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11556 unsigned HOST_WIDE_INT origlen;
11558 /* Don't crash on snprintf (str1, cst, "%s"). */
11559 if (!orig)
11560 return NULL_TREE;
11562 retval = c_strlen (orig, 1);
11563 if (!retval || !host_integerp (retval, 1))
11564 return NULL_TREE;
11566 origlen = tree_low_cst (retval, 1);
11567 /* We could expand this as
11568 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
11569 or to
11570 memcpy (str1, str2_with_nul_at_cstm1, cst);
11571 but in the former case that might increase code size
11572 and in the latter case grow .rodata section too much.
11573 So punt for now. */
11574 if (origlen >= destlen)
11575 return NULL_TREE;
11577 /* Convert snprintf (str1, cst, "%s", str2) into
11578 strcpy (str1, str2) if strlen (str2) < cst. */
11579 if (!fn)
11580 return NULL_TREE;
11582 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11584 if (ignored)
11585 retval = NULL_TREE;
11588 if (call && retval)
11590 tree fn = built_in_decls[BUILT_IN_SNPRINTF];
11591 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
11592 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11594 else
11595 return call;
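/* Illustrative example (not part of the original sources): the snprintf fold
   above only fires when the constant destination size is known to be large
   enough; example_snprintf and buf are made-up names:

     #include <stdio.h>
     #include <string.h>

     void
     example_snprintf (char *buf)
     {
       snprintf (buf, 16, "hello");  // becomes strcpy (buf, "hello"),
                                     // since strlen ("hello") < 16
       snprintf (buf, 4, "hello");   // not folded: it would truncate
     }  */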
11598 /* Expand a call EXP to __builtin_object_size. */
11600 static rtx
11601 expand_builtin_object_size (tree exp)
11603 tree ost;
11604 int object_size_type;
11605 tree fndecl = get_callee_fndecl (exp);
11607 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11609 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11610 exp, fndecl);
11611 expand_builtin_trap ();
11612 return const0_rtx;
11615 ost = CALL_EXPR_ARG (exp, 1);
11616 STRIP_NOPS (ost);
11618 if (TREE_CODE (ost) != INTEGER_CST
11619 || tree_int_cst_sgn (ost) < 0
11620 || compare_tree_int (ost, 3) > 0)
11622 error ("%Klast argument of %D is not integer constant between 0 and 3",
11623 exp, fndecl);
11624 expand_builtin_trap ();
11625 return const0_rtx;
11628 object_size_type = tree_low_cst (ost, 0);
11630 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11633 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11634 FCODE is the BUILT_IN_* to use.
11635 Return NULL_RTX if we failed; the caller should emit a normal call,
11636 otherwise try to get the result in TARGET, if convenient (and in
11637 mode MODE if that's convenient). */
11639 static rtx
11640 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11641 enum built_in_function fcode)
11643 tree dest, src, len, size;
11645 if (!validate_arglist (exp,
11646 POINTER_TYPE,
11647 fcode == BUILT_IN_MEMSET_CHK
11648 ? INTEGER_TYPE : POINTER_TYPE,
11649 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11650 return NULL_RTX;
11652 dest = CALL_EXPR_ARG (exp, 0);
11653 src = CALL_EXPR_ARG (exp, 1);
11654 len = CALL_EXPR_ARG (exp, 2);
11655 size = CALL_EXPR_ARG (exp, 3);
11657 if (! host_integerp (size, 1))
11658 return NULL_RTX;
11660 if (host_integerp (len, 1) || integer_all_onesp (size))
11662 tree fn;
11664 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11666 warning_at (tree_nonartificial_location (exp),
11667 0, "%Kcall to %D will always overflow destination buffer",
11668 exp, get_callee_fndecl (exp));
11669 return NULL_RTX;
11672 fn = NULL_TREE;
11673 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11674 mem{cpy,pcpy,move,set} is available. */
11675 switch (fcode)
11677 case BUILT_IN_MEMCPY_CHK:
11678 fn = built_in_decls[BUILT_IN_MEMCPY];
11679 break;
11680 case BUILT_IN_MEMPCPY_CHK:
11681 fn = built_in_decls[BUILT_IN_MEMPCPY];
11682 break;
11683 case BUILT_IN_MEMMOVE_CHK:
11684 fn = built_in_decls[BUILT_IN_MEMMOVE];
11685 break;
11686 case BUILT_IN_MEMSET_CHK:
11687 fn = built_in_decls[BUILT_IN_MEMSET];
11688 break;
11689 default:
11690 break;
11693 if (! fn)
11694 return NULL_RTX;
11696 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11697 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11698 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11699 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11701 else if (fcode == BUILT_IN_MEMSET_CHK)
11702 return NULL_RTX;
11703 else
11705 unsigned int dest_align
11706 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11708 /* If DEST is not a pointer type, call the normal function. */
11709 if (dest_align == 0)
11710 return NULL_RTX;
11712 /* If SRC and DEST are the same (and not volatile), do nothing. */
11713 if (operand_equal_p (src, dest, 0))
11715 tree expr;
11717 if (fcode != BUILT_IN_MEMPCPY_CHK)
11719 /* Evaluate and ignore LEN in case it has side-effects. */
11720 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11721 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11724 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11725 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11728 /* __memmove_chk special case. */
11729 if (fcode == BUILT_IN_MEMMOVE_CHK)
11731 unsigned int src_align
11732 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11734 if (src_align == 0)
11735 return NULL_RTX;
11737 /* If src is categorized for a readonly section we can use
11738 normal __memcpy_chk. */
11739 if (readonly_data_expr (src))
11741 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11742 if (!fn)
11743 return NULL_RTX;
11744 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11745 dest, src, len, size);
11746 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11747 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11748 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11751 return NULL_RTX;
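/* Illustrative example (not part of the original sources) of how the _chk
   expansion above behaves; example_chk, buf and src are made-up names:

     #include <string.h>

     void
     example_chk (const char *src)
     {
       char buf[8];
       // The known object size cannot be exceeded, so the checked call is
       // expanded just like a plain memcpy:
       __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
       // This call must overflow: it is diagnosed ("will always overflow
       // destination buffer") and emitted as a library call instead:
       __builtin___memcpy_chk (buf, src, 16, __builtin_object_size (buf, 0));
     }  */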
11755 /* Emit warning if a buffer overflow is detected at compile time. */
11757 static void
11758 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11760 int is_strlen = 0;
11761 tree len, size;
11762 location_t loc = tree_nonartificial_location (exp);
11764 switch (fcode)
11766 case BUILT_IN_STRCPY_CHK:
11767 case BUILT_IN_STPCPY_CHK:
11768 /* For __strcat_chk the warning will be emitted only if overflowing
11769 by at least strlen (dest) + 1 bytes. */
11770 case BUILT_IN_STRCAT_CHK:
11771 len = CALL_EXPR_ARG (exp, 1);
11772 size = CALL_EXPR_ARG (exp, 2);
11773 is_strlen = 1;
11774 break;
11775 case BUILT_IN_STRNCAT_CHK:
11776 case BUILT_IN_STRNCPY_CHK:
11777 len = CALL_EXPR_ARG (exp, 2);
11778 size = CALL_EXPR_ARG (exp, 3);
11779 break;
11780 case BUILT_IN_SNPRINTF_CHK:
11781 case BUILT_IN_VSNPRINTF_CHK:
11782 len = CALL_EXPR_ARG (exp, 1);
11783 size = CALL_EXPR_ARG (exp, 3);
11784 break;
11785 default:
11786 gcc_unreachable ();
11789 if (!len || !size)
11790 return;
11792 if (! host_integerp (size, 1) || integer_all_onesp (size))
11793 return;
11795 if (is_strlen)
11797 len = c_strlen (len, 1);
11798 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11799 return;
11801 else if (fcode == BUILT_IN_STRNCAT_CHK)
11803 tree src = CALL_EXPR_ARG (exp, 1);
11804 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11805 return;
11806 src = c_strlen (src, 1);
11807 if (! src || ! host_integerp (src, 1))
11809 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11810 exp, get_callee_fndecl (exp));
11811 return;
11813 else if (tree_int_cst_lt (src, size))
11814 return;
11816 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11817 return;
11819 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11820 exp, get_callee_fndecl (exp));
11823 /* Emit warning if a buffer overflow is detected at compile time
11824 in __sprintf_chk/__vsprintf_chk calls. */
11826 static void
11827 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11829 tree size, len, fmt;
11830 const char *fmt_str;
11831 int nargs = call_expr_nargs (exp);
11833 /* Verify the required arguments in the original call. */
11835 if (nargs < 4)
11836 return;
11837 size = CALL_EXPR_ARG (exp, 2);
11838 fmt = CALL_EXPR_ARG (exp, 3);
11840 if (! host_integerp (size, 1) || integer_all_onesp (size))
11841 return;
11843 /* Check whether the format is a literal string constant. */
11844 fmt_str = c_getstr (fmt);
11845 if (fmt_str == NULL)
11846 return;
11848 if (!init_target_chars ())
11849 return;
11851 /* If the format doesn't contain % args or %%, we know its size. */
11852 if (strchr (fmt_str, target_percent) == 0)
11853 len = build_int_cstu (size_type_node, strlen (fmt_str));
11854 /* If the format is "%s" and first ... argument is a string literal,
11855 we know it too. */
11856 else if (fcode == BUILT_IN_SPRINTF_CHK
11857 && strcmp (fmt_str, target_percent_s) == 0)
11859 tree arg;
11861 if (nargs < 5)
11862 return;
11863 arg = CALL_EXPR_ARG (exp, 4);
11864 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11865 return;
11867 len = c_strlen (arg, 1);
11868 if (!len || ! host_integerp (len, 1))
11869 return;
11871 else
11872 return;
11874 if (! tree_int_cst_lt (len, size))
11875 warning_at (tree_nonartificial_location (exp),
11876 0, "%Kcall to %D will always overflow destination buffer",
11877 exp, get_callee_fndecl (exp));
11880 /* Emit a warning if free is called with the address of a variable. */
11882 static void
11883 maybe_emit_free_warning (tree exp)
11885 tree arg = CALL_EXPR_ARG (exp, 0);
11887 STRIP_NOPS (arg);
11888 if (TREE_CODE (arg) != ADDR_EXPR)
11889 return;
11891 arg = get_base_address (TREE_OPERAND (arg, 0));
11892 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11893 return;
11895 if (SSA_VAR_P (arg))
11896 warning_at (tree_nonartificial_location (exp),
11897 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11898 else
11899 warning_at (tree_nonartificial_location (exp),
11900 0, "%Kattempt to free a non-heap object", exp);
11903 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11904 if possible. */
11906 tree
11907 fold_builtin_object_size (tree ptr, tree ost)
11909 unsigned HOST_WIDE_INT bytes;
11910 int object_size_type;
11912 if (!validate_arg (ptr, POINTER_TYPE)
11913 || !validate_arg (ost, INTEGER_TYPE))
11914 return NULL_TREE;
11916 STRIP_NOPS (ost);
11918 if (TREE_CODE (ost) != INTEGER_CST
11919 || tree_int_cst_sgn (ost) < 0
11920 || compare_tree_int (ost, 3) > 0)
11921 return NULL_TREE;
11923 object_size_type = tree_low_cst (ost, 0);
11925 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11926 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11927 and (size_t) 0 for types 2 and 3. */
11928 if (TREE_SIDE_EFFECTS (ptr))
11929 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11931 if (TREE_CODE (ptr) == ADDR_EXPR)
11933 bytes = compute_builtin_object_size (ptr, object_size_type);
11934 if (double_int_fits_to_tree_p (size_type_node,
11935 uhwi_to_double_int (bytes)))
11936 return build_int_cstu (size_type_node, bytes);
11938 else if (TREE_CODE (ptr) == SSA_NAME)
11940 /* If object size is not known yet, delay folding until
11941 later.  Maybe subsequent passes will help determine
11942 it. */
11943 bytes = compute_builtin_object_size (ptr, object_size_type);
11944 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
11945 && double_int_fits_to_tree_p (size_type_node,
11946 uhwi_to_double_int (bytes)))
11947 return build_int_cstu (size_type_node, bytes);
11950 return NULL_TREE;
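/* Illustrative example (not part of the original sources) of the
   __builtin_object_size folds above; example_objsz, buf and p are made-up
   names:

     #include <stddef.h>

     size_t
     example_objsz (char *p)
     {
       char buf[32];
       size_t a = __builtin_object_size (buf, 0);  // folds to 32
       size_t b = __builtin_object_size (p, 0);    // unknown: (size_t) -1
       size_t c = __builtin_object_size (p, 2);    // unknown: (size_t) 0
       return a + b + c;
     }  */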
11953 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11954 DEST, SRC, LEN, and SIZE are the arguments to the call.
11955 IGNORE is true if the return value can be ignored.  FCODE is the BUILT_IN_*
11956 code of the builtin. If MAXLEN is not NULL, it is maximum length
11957 passed as third argument. */
11959 tree
11960 fold_builtin_memory_chk (location_t loc, tree fndecl,
11961 tree dest, tree src, tree len, tree size,
11962 tree maxlen, bool ignore,
11963 enum built_in_function fcode)
11965 tree fn;
11967 if (!validate_arg (dest, POINTER_TYPE)
11968 || !validate_arg (src,
11969 (fcode == BUILT_IN_MEMSET_CHK
11970 ? INTEGER_TYPE : POINTER_TYPE))
11971 || !validate_arg (len, INTEGER_TYPE)
11972 || !validate_arg (size, INTEGER_TYPE))
11973 return NULL_TREE;
11975 /* If SRC and DEST are the same (and not volatile), return DEST
11976 (resp. DEST+LEN for __mempcpy_chk). */
11977 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11979 if (fcode != BUILT_IN_MEMPCPY_CHK)
11980 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11981 dest, len);
11982 else
11984 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11985 dest, len);
11986 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11990 if (! host_integerp (size, 1))
11991 return NULL_TREE;
11993 if (! integer_all_onesp (size))
11995 if (! host_integerp (len, 1))
11997 /* If LEN is not constant, try MAXLEN too.
11998 For MAXLEN only allow optimizing into non-_ocs function
11999 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12000 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12002 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12004 /* (void) __mempcpy_chk () can be optimized into
12005 (void) __memcpy_chk (). */
12006 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12007 if (!fn)
12008 return NULL_TREE;
12010 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12012 return NULL_TREE;
12015 else
12016 maxlen = len;
12018 if (tree_int_cst_lt (size, maxlen))
12019 return NULL_TREE;
12022 fn = NULL_TREE;
12023 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12024 mem{cpy,pcpy,move,set} is available. */
12025 switch (fcode)
12027 case BUILT_IN_MEMCPY_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMCPY];
12029 break;
12030 case BUILT_IN_MEMPCPY_CHK:
12031 fn = built_in_decls[BUILT_IN_MEMPCPY];
12032 break;
12033 case BUILT_IN_MEMMOVE_CHK:
12034 fn = built_in_decls[BUILT_IN_MEMMOVE];
12035 break;
12036 case BUILT_IN_MEMSET_CHK:
12037 fn = built_in_decls[BUILT_IN_MEMSET];
12038 break;
12039 default:
12040 break;
12043 if (!fn)
12044 return NULL_TREE;
12046 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12049 /* Fold a call to the __st[rp]cpy_chk builtin.
12050 DEST, SRC, and SIZE are the arguments to the call.
12051 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12052 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12053 strings passed as second argument. */
12055 tree
12056 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12057 tree src, tree size,
12058 tree maxlen, bool ignore,
12059 enum built_in_function fcode)
12061 tree len, fn;
12063 if (!validate_arg (dest, POINTER_TYPE)
12064 || !validate_arg (src, POINTER_TYPE)
12065 || !validate_arg (size, INTEGER_TYPE))
12066 return NULL_TREE;
12068 /* If SRC and DEST are the same (and not volatile), return DEST. */
12069 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12070 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12072 if (! host_integerp (size, 1))
12073 return NULL_TREE;
12075 if (! integer_all_onesp (size))
12077 len = c_strlen (src, 1);
12078 if (! len || ! host_integerp (len, 1))
12080 /* If LEN is not constant, try MAXLEN too.
12081 For MAXLEN only allow optimizing into non-_ocs function
12082 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12083 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12085 if (fcode == BUILT_IN_STPCPY_CHK)
12087 if (! ignore)
12088 return NULL_TREE;
12090 /* If return value of __stpcpy_chk is ignored,
12091 optimize into __strcpy_chk. */
12092 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12093 if (!fn)
12094 return NULL_TREE;
12096 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12099 if (! len || TREE_SIDE_EFFECTS (len))
12100 return NULL_TREE;
12102 /* If c_strlen returned something, but not a constant,
12103 transform __strcpy_chk into __memcpy_chk. */
12104 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12105 if (!fn)
12106 return NULL_TREE;
12108 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12109 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12110 build_call_expr_loc (loc, fn, 4,
12111 dest, src, len, size));
12114 else
12115 maxlen = len;
12117 if (! tree_int_cst_lt (maxlen, size))
12118 return NULL_TREE;
12121 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12122 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12123 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12124 if (!fn)
12125 return NULL_TREE;
12127 return build_call_expr_loc (loc, fn, 2, dest, src);
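/* Illustrative example (not part of the original sources) of the
   __strcpy_chk folds above; example_strcpy_chk, buf, dst and src are
   made-up names:

     void
     example_strcpy_chk (char *dst, const char *src)
     {
       char buf[8];
       // Known source length (3) is below the object size (8): folded to
       // a plain strcpy.
       __builtin___strcpy_chk (buf, "abc", __builtin_object_size (buf, 0));
       // Object size unknown, i.e. (size_t) -1: also folded to strcpy.
       __builtin___strcpy_chk (dst, src, __builtin_object_size (dst, 0));
     }  */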
12130 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12131 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12132 length passed as third argument. */
12134 tree
12135 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12136 tree len, tree size, tree maxlen)
12138 tree fn;
12140 if (!validate_arg (dest, POINTER_TYPE)
12141 || !validate_arg (src, POINTER_TYPE)
12142 || !validate_arg (len, INTEGER_TYPE)
12143 || !validate_arg (size, INTEGER_TYPE))
12144 return NULL_TREE;
12146 if (! host_integerp (size, 1))
12147 return NULL_TREE;
12149 if (! integer_all_onesp (size))
12151 if (! host_integerp (len, 1))
12153 /* If LEN is not constant, try MAXLEN too.
12154 For MAXLEN only allow optimizing into non-_ocs function
12155 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12156 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12157 return NULL_TREE;
12159 else
12160 maxlen = len;
12162 if (tree_int_cst_lt (size, maxlen))
12163 return NULL_TREE;
12166 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12167 fn = built_in_decls[BUILT_IN_STRNCPY];
12168 if (!fn)
12169 return NULL_TREE;
12171 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12174 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12175 are the arguments to the call. */
12177 static tree
12178 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12179 tree src, tree size)
12181 tree fn;
12182 const char *p;
12184 if (!validate_arg (dest, POINTER_TYPE)
12185 || !validate_arg (src, POINTER_TYPE)
12186 || !validate_arg (size, INTEGER_TYPE))
12187 return NULL_TREE;
12189 p = c_getstr (src);
12190 /* If the SRC parameter is "", return DEST. */
12191 if (p && *p == '\0')
12192 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12194 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12195 return NULL_TREE;
12197 /* If __builtin_strcat_chk is used, assume strcat is available. */
12198 fn = built_in_decls[BUILT_IN_STRCAT];
12199 if (!fn)
12200 return NULL_TREE;
12202 return build_call_expr_loc (loc, fn, 2, dest, src);
12205 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12206 LEN, and SIZE. */
12208 static tree
12209 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12210 tree dest, tree src, tree len, tree size)
12212 tree fn;
12213 const char *p;
12215 if (!validate_arg (dest, POINTER_TYPE)
12216 || !validate_arg (src, POINTER_TYPE)
12217 || !validate_arg (len, INTEGER_TYPE)
12218 || !validate_arg (size, INTEGER_TYPE))
12219 return NULL_TREE;
12221 p = c_getstr (src);
12222 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12223 if (p && *p == '\0')
12224 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12225 else if (integer_zerop (len))
12226 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12228 if (! host_integerp (size, 1))
12229 return NULL_TREE;
12231 if (! integer_all_onesp (size))
12233 tree src_len = c_strlen (src, 1);
12234 if (src_len
12235 && host_integerp (src_len, 1)
12236 && host_integerp (len, 1)
12237 && ! tree_int_cst_lt (len, src_len))
12239 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12240 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12241 if (!fn)
12242 return NULL_TREE;
12244 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12246 return NULL_TREE;
12249 /* If __builtin_strncat_chk is used, assume strncat is available. */
12250 fn = built_in_decls[BUILT_IN_STRNCAT];
12251 if (!fn)
12252 return NULL_TREE;
12254 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12257 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
12258 Return NULL_TREE if a normal call should be emitted rather than
12259 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
12260 or BUILT_IN_VSPRINTF_CHK. */
12262 static tree
12263 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
12264 enum built_in_function fcode)
12266 tree dest, size, len, fn, fmt, flag;
12267 const char *fmt_str;
12269 /* Verify the required arguments in the original call. */
12270 if (nargs < 4)
12271 return NULL_TREE;
12272 dest = args[0];
12273 if (!validate_arg (dest, POINTER_TYPE))
12274 return NULL_TREE;
12275 flag = args[1];
12276 if (!validate_arg (flag, INTEGER_TYPE))
12277 return NULL_TREE;
12278 size = args[2];
12279 if (!validate_arg (size, INTEGER_TYPE))
12280 return NULL_TREE;
12281 fmt = args[3];
12282 if (!validate_arg (fmt, POINTER_TYPE))
12283 return NULL_TREE;
12285 if (! host_integerp (size, 1))
12286 return NULL_TREE;
12288 len = NULL_TREE;
12290 if (!init_target_chars ())
12291 return NULL_TREE;
12293 /* Check whether the format is a literal string constant. */
12294 fmt_str = c_getstr (fmt);
12295 if (fmt_str != NULL)
12297 /* If the format doesn't contain % args or %%, we know the size. */
12298 if (strchr (fmt_str, target_percent) == 0)
12300 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12301 len = build_int_cstu (size_type_node, strlen (fmt_str));
12303 /* If the format is "%s" and first ... argument is a string literal,
12304 we know the size too. */
12305 else if (fcode == BUILT_IN_SPRINTF_CHK
12306 && strcmp (fmt_str, target_percent_s) == 0)
12308 tree arg;
12310 if (nargs == 5)
12312 arg = args[4];
12313 if (validate_arg (arg, POINTER_TYPE))
12315 len = c_strlen (arg, 1);
12316 if (! len || ! host_integerp (len, 1))
12317 len = NULL_TREE;
12323 if (! integer_all_onesp (size))
12325 if (! len || ! tree_int_cst_lt (len, size))
12326 return NULL_TREE;
12329 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12330 or if format doesn't contain % chars or is "%s". */
12331 if (! integer_zerop (flag))
12333 if (fmt_str == NULL)
12334 return NULL_TREE;
12335 if (strchr (fmt_str, target_percent) != NULL
12336 && strcmp (fmt_str, target_percent_s))
12337 return NULL_TREE;
12340 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12341 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12342 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12343 if (!fn)
12344 return NULL_TREE;
12346 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
12349 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12350 a normal call should be emitted rather than expanding the function
12351 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12353 static tree
12354 fold_builtin_sprintf_chk (location_t loc, tree exp,
12355 enum built_in_function fcode)
12357 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
12358 CALL_EXPR_ARGP (exp), fcode);
12361 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.  Return
12362 NULL_TREE if a normal call should be emitted rather than expanding
12363 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12364 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12365 passed as second argument. */
12367 static tree
12368 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
12369 tree maxlen, enum built_in_function fcode)
12371 tree dest, size, len, fn, fmt, flag;
12372 const char *fmt_str;
12374 /* Verify the required arguments in the original call. */
12375 if (nargs < 5)
12376 return NULL_TREE;
12377 dest = args[0];
12378 if (!validate_arg (dest, POINTER_TYPE))
12379 return NULL_TREE;
12380 len = args[1];
12381 if (!validate_arg (len, INTEGER_TYPE))
12382 return NULL_TREE;
12383 flag = args[2];
12384 if (!validate_arg (flag, INTEGER_TYPE))
12385 return NULL_TREE;
12386 size = args[3];
12387 if (!validate_arg (size, INTEGER_TYPE))
12388 return NULL_TREE;
12389 fmt = args[4];
12390 if (!validate_arg (fmt, POINTER_TYPE))
12391 return NULL_TREE;
12393 if (! host_integerp (size, 1))
12394 return NULL_TREE;
12396 if (! integer_all_onesp (size))
12398 if (! host_integerp (len, 1))
12400 /* If LEN is not constant, try MAXLEN too.
12401 For MAXLEN only allow optimizing into non-_ocs function
12402 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12403 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12404 return NULL_TREE;
12406 else
12407 maxlen = len;
12409 if (tree_int_cst_lt (size, maxlen))
12410 return NULL_TREE;
12413 if (!init_target_chars ())
12414 return NULL_TREE;
12416 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12417 or if format doesn't contain % chars or is "%s". */
12418 if (! integer_zerop (flag))
12420 fmt_str = c_getstr (fmt);
12421 if (fmt_str == NULL)
12422 return NULL_TREE;
12423 if (strchr (fmt_str, target_percent) != NULL
12424 && strcmp (fmt_str, target_percent_s))
12425 return NULL_TREE;
12428 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12429 available. */
12430 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12431 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12432 if (!fn)
12433 return NULL_TREE;
12435 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
12438 /* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
12439 a normal call should be emitted rather than expanding the function
12440 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12441 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12442 passed as second argument. */
12444 tree
12445 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12446 enum built_in_function fcode)
12448 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
12449 CALL_EXPR_ARGP (exp), maxlen, fcode);
12452 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12453 FMT and ARG are the arguments to the call; we don't fold cases with
12454 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12456 Return NULL_TREE if no simplification was possible, otherwise return the
12457 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12458 code of the function to be simplified. */
12460 static tree
12461 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12462 tree arg, bool ignore,
12463 enum built_in_function fcode)
12465 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12466 const char *fmt_str = NULL;
12468 /* If the return value is used, don't do the transformation. */
12469 if (! ignore)
12470 return NULL_TREE;
12472 /* Verify the required arguments in the original call. */
12473 if (!validate_arg (fmt, POINTER_TYPE))
12474 return NULL_TREE;
12476 /* Check whether the format is a literal string constant. */
12477 fmt_str = c_getstr (fmt);
12478 if (fmt_str == NULL)
12479 return NULL_TREE;
12481 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12483 /* If we're using an unlocked function, assume the other
12484 unlocked functions exist explicitly. */
12485 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12486 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12488 else
12490 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12491 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12494 if (!init_target_chars ())
12495 return NULL_TREE;
12497 if (strcmp (fmt_str, target_percent_s) == 0
12498 || strchr (fmt_str, target_percent) == NULL)
12500 const char *str;
12502 if (strcmp (fmt_str, target_percent_s) == 0)
12504 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12505 return NULL_TREE;
12507 if (!arg || !validate_arg (arg, POINTER_TYPE))
12508 return NULL_TREE;
12510 str = c_getstr (arg);
12511 if (str == NULL)
12512 return NULL_TREE;
12514 else
12516 /* The format specifier doesn't contain any '%' characters. */
12517 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12518 && arg)
12519 return NULL_TREE;
12520 str = fmt_str;
12523 /* If the string was "", printf does nothing. */
12524 if (str[0] == '\0')
12525 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12527 /* If the string has length of 1, call putchar. */
12528 if (str[1] == '\0')
12530 /* Given printf ("c"), where c is any single character,
12531 convert "c"[0] to an int and pass that to the replacement
12532 function. */
12533 newarg = build_int_cst (integer_type_node, str[0]);
12534 if (fn_putchar)
12535 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12537 else
12539 /* If the string was "string\n", call puts("string"). */
12540 size_t len = strlen (str);
12541 if ((unsigned char)str[len - 1] == target_newline
12542 && (size_t) (int) len == len
12543 && (int) len > 0)
12545 char *newstr;
12546 tree offset_node, string_cst;
12548 /* Create a NUL-terminated string that's one char shorter
12549 than the original, stripping off the trailing '\n'. */
12550 newarg = build_string_literal (len, str);
12551 string_cst = string_constant (newarg, &offset_node);
12552 gcc_checking_assert (string_cst
12553 && (TREE_STRING_LENGTH (string_cst)
12554 == (int) len)
12555 && integer_zerop (offset_node)
12556 && (unsigned char)
12557 TREE_STRING_POINTER (string_cst)[len - 1]
12558 == target_newline);
12559 /* build_string_literal creates a new STRING_CST,
12560 modify it in place to avoid double copying. */
12561 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
12562 newstr[len - 1] = '\0';
12563 if (fn_puts)
12564 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12566 else
12567 /* We'd like to arrange to call fputs(string,stdout) here,
12568 but we need stdout and don't have a way to get it yet. */
12569 return NULL_TREE;
12573 /* The other optimizations can be done only on the non-va_list variants. */
12574 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12575 return NULL_TREE;
12577 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12578 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12580 if (!arg || !validate_arg (arg, POINTER_TYPE))
12581 return NULL_TREE;
12582 if (fn_puts)
12583 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12586 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12587 else if (strcmp (fmt_str, target_percent_c) == 0)
12589 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12590 return NULL_TREE;
12591 if (fn_putchar)
12592 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12595 if (!call)
12596 return NULL_TREE;
12598 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
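/* Illustrative example (not part of the original sources): when the return
   value is unused, the printf fold above corresponds to rewrites such as
   these; example_printf, s and c are made-up names:

     #include <stdio.h>

     void
     example_printf (const char *s, int c)
     {
       printf ("");         // removed entirely
       printf ("x");        // becomes putchar ('x')
       printf ("hello\n");  // becomes puts ("hello")
       printf ("%s\n", s);  // becomes puts (s)
       printf ("%c", c);    // becomes putchar (c)
     }  */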
12601 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12602 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12603 more than 3 arguments, and ARG may be null in the 2-argument case.
12605 Return NULL_TREE if no simplification was possible, otherwise return the
12606 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12607 code of the function to be simplified. */
12609 static tree
12610 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12611 tree fmt, tree arg, bool ignore,
12612 enum built_in_function fcode)
12614 tree fn_fputc, fn_fputs, call = NULL_TREE;
12615 const char *fmt_str = NULL;
12617 /* If the return value is used, don't do the transformation. */
12618 if (! ignore)
12619 return NULL_TREE;
12621 /* Verify the required arguments in the original call. */
12622 if (!validate_arg (fp, POINTER_TYPE))
12623 return NULL_TREE;
12624 if (!validate_arg (fmt, POINTER_TYPE))
12625 return NULL_TREE;
12627 /* Check whether the format is a literal string constant. */
12628 fmt_str = c_getstr (fmt);
12629 if (fmt_str == NULL)
12630 return NULL_TREE;
12632 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12634 /* If we're using an unlocked function, assume the other
12635 unlocked functions exist explicitly. */
12636 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12637 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12639 else
12641 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12642 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12645 if (!init_target_chars ())
12646 return NULL_TREE;
12648 /* If the format doesn't contain % args or %%, use fputs. */
12649 if (strchr (fmt_str, target_percent) == NULL)
12651 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12652 && arg)
12653 return NULL_TREE;
12655 /* If the format specifier was "", fprintf does nothing. */
12656 if (fmt_str[0] == '\0')
12658 /* If FP has side-effects, just wait until gimplification is
12659 done. */
12660 if (TREE_SIDE_EFFECTS (fp))
12661 return NULL_TREE;
12663 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12666 /* When "string" doesn't contain %, replace all cases of
12667 fprintf (fp, string) with fputs (string, fp). The fputs
12668 builtin will take care of special cases like length == 1. */
12669 if (fn_fputs)
12670 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12673 /* The other optimizations can be done only on the non-va_list variants. */
12674 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12675 return NULL_TREE;
12677 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12678 else if (strcmp (fmt_str, target_percent_s) == 0)
12680 if (!arg || !validate_arg (arg, POINTER_TYPE))
12681 return NULL_TREE;
12682 if (fn_fputs)
12683 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12686 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12687 else if (strcmp (fmt_str, target_percent_c) == 0)
12689 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12690 return NULL_TREE;
12691 if (fn_fputc)
12692 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12695 if (!call)
12696 return NULL_TREE;
12697 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
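/* Illustrative example (not part of the original sources): when the return
   value is unused, the fprintf fold above corresponds to rewrites such as
   these; example_fprintf is a made-up name:

     #include <stdio.h>

     void
     example_fprintf (FILE *fp, const char *s, int c)
     {
       fprintf (fp, "hello");  // becomes fputs ("hello", fp)
       fprintf (fp, "%s", s);  // becomes fputs (s, fp)
       fprintf (fp, "%c", c);  // becomes fputc (c, fp)
     }  */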
12700 /* Initialize format string characters in the target charset. */
12702 static bool
12703 init_target_chars (void)
12705 static bool init;
12706 if (!init)
12708 target_newline = lang_hooks.to_target_charset ('\n');
12709 target_percent = lang_hooks.to_target_charset ('%');
12710 target_c = lang_hooks.to_target_charset ('c');
12711 target_s = lang_hooks.to_target_charset ('s');
12712 if (target_newline == 0 || target_percent == 0 || target_c == 0
12713 || target_s == 0)
12714 return false;
12716 target_percent_c[0] = target_percent;
12717 target_percent_c[1] = target_c;
12718 target_percent_c[2] = '\0';
12720 target_percent_s[0] = target_percent;
12721 target_percent_s[1] = target_s;
12722 target_percent_s[2] = '\0';
12724 target_percent_s_newline[0] = target_percent;
12725 target_percent_s_newline[1] = target_s;
12726 target_percent_s_newline[2] = target_newline;
12727 target_percent_s_newline[3] = '\0';
12729 init = true;
12731 return true;
12734 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12735 and no overflow/underflow occurred. INEXACT is true if M was not
12736 exactly calculated. TYPE is the tree type for the result. This
12737 function assumes that you cleared the MPFR flags and then
12738 calculated M to see if anything subsequently set a flag prior to
12739 entering this function. Return NULL_TREE if any checks fail. */
12741 static tree
12742 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12744 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12745 overflow/underflow occurred. If -frounding-math, proceed iff the
12746 result of calling FUNC was exact. */
12747 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12748 && (!flag_rounding_math || !inexact))
12750 REAL_VALUE_TYPE rr;
12752 real_from_mpfr (&rr, m, type, GMP_RNDN);
12753 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12754 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12755 but the mpfr_t is not, then we underflowed in the
12756 conversion. */
12757 if (real_isfinite (&rr)
12758 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12760 REAL_VALUE_TYPE rmode;
12762 real_convert (&rmode, TYPE_MODE (type), &rr);
12763 /* Proceed iff the specified mode can hold the value. */
12764 if (real_identical (&rmode, &rr))
12765 return build_real (type, rmode);
12768 return NULL_TREE;
12771 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12772 number and no overflow/underflow occurred. INEXACT is true if M
12773 was not exactly calculated. TYPE is the tree type for the result.
12774 This function assumes that you cleared the MPFR flags and then
12775 calculated M to see if anything subsequently set a flag prior to
12776 entering this function. Return NULL_TREE if any checks fail, if
12777 FORCE_CONVERT is true, then bypass the checks. */
12779 static tree
12780 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12782 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12783 overflow/underflow occurred. If -frounding-math, proceed iff the
12784 result of calling FUNC was exact. */
12785 if (force_convert
12786 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12787 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12788 && (!flag_rounding_math || !inexact)))
12790 REAL_VALUE_TYPE re, im;
12792 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
12793 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
12794 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12795 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12796 but the mpfr_t is not, then we underflowed in the
12797 conversion. */
12798 if (force_convert
12799 || (real_isfinite (&re) && real_isfinite (&im)
12800 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12801 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12803 REAL_VALUE_TYPE re_mode, im_mode;
12805 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12806 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12807 /* Proceed iff the specified mode can hold the value. */
12808 if (force_convert
12809 || (real_identical (&re_mode, &re)
12810 && real_identical (&im_mode, &im)))
12811 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12812 build_real (TREE_TYPE (type), im_mode));
12815 return NULL_TREE;
12818 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12819 FUNC on it and return the resulting value as a tree with type TYPE.
12820 If MIN and/or MAX are not NULL, then the supplied ARG must be
12821 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12822 acceptable values, otherwise they are not. The mpfr precision is
12823 set to the precision of TYPE. We assume that function FUNC returns
12824 zero if the result could be calculated exactly within the requested
12825 precision. */
12827 static tree
12828 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12829 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12830 bool inclusive)
12832 tree result = NULL_TREE;
12834 STRIP_NOPS (arg);
12836 /* To proceed, MPFR must exactly represent the target floating point
12837 format, which only happens when the target base equals two. */
12838 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12839 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12841 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12843 if (real_isfinite (ra)
12844 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12845 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12847 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12848 const int prec = fmt->p;
12849 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12850 int inexact;
12851 mpfr_t m;
12853 mpfr_init2 (m, prec);
12854 mpfr_from_real (m, ra, GMP_RNDN);
12855 mpfr_clear_flags ();
12856 inexact = func (m, m, rnd);
12857 result = do_mpfr_ckconv (m, type, inexact);
12858 mpfr_clear (m);
12862 return result;
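/* Illustrative sketch (assumption, not in the original source): typical uses
   of do_mpfr_arg1, shown as a hypothetical helper. dconst0 is GCC's
   REAL_VALUE_TYPE zero constant. */

static tree
example_do_mpfr_arg1_uses (tree arg, tree type, bool fold_sqrt)
{
  if (fold_sqrt)
    /* sqrt is only valid for arguments >= 0, so pass a lower bound of
       zero with INCLUSIVE set. */
    return do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true);

  /* sin has no domain restriction, so no MIN/MAX bounds are needed. */
  return do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);
}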
12865 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12866 FUNC on it and return the resulting value as a tree with type TYPE.
12867 The mpfr precision is set to the precision of TYPE. We assume that
12868 function FUNC returns zero if the result could be calculated
12869 exactly within the requested precision. */
12871 static tree
12872 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12873 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12875 tree result = NULL_TREE;
12877 STRIP_NOPS (arg1);
12878 STRIP_NOPS (arg2);
12880 /* To proceed, MPFR must exactly represent the target floating point
12881 format, which only happens when the target base equals two. */
12882 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12883 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12884 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12886 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12887 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12889 if (real_isfinite (ra1) && real_isfinite (ra2))
12891 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12892 const int prec = fmt->p;
12893 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12894 int inexact;
12895 mpfr_t m1, m2;
12897 mpfr_inits2 (prec, m1, m2, NULL);
12898 mpfr_from_real (m1, ra1, GMP_RNDN);
12899 mpfr_from_real (m2, ra2, GMP_RNDN);
12900 mpfr_clear_flags ();
12901 inexact = func (m1, m1, m2, rnd);
12902 result = do_mpfr_ckconv (m1, type, inexact);
12903 mpfr_clears (m1, m2, NULL);
12907 return result;
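/* Illustrative sketch (assumption, not in the original source): a
   hypothetical use of do_mpfr_arg2, folding pow (c1, c2) when both arguments
   are constants. mpfr_pow matches the required FUNC signature; mpfr_hypot or
   mpfr_atan2 could be passed the same way. */

static tree
example_do_mpfr_arg2_use (tree arg0, tree arg1, tree type)
{
  return do_mpfr_arg2 (arg0, arg1, type, mpfr_pow);
}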
12910 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12911 FUNC on it and return the resulting value as a tree with type TYPE.
12912 The mpfr precision is set to the precision of TYPE. We assume that
12913 function FUNC returns zero if the result could be calculated
12914 exactly within the requested precision. */
12916 static tree
12917 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12918 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12920 tree result = NULL_TREE;
12922 STRIP_NOPS (arg1);
12923 STRIP_NOPS (arg2);
12924 STRIP_NOPS (arg3);
12926 /* To proceed, MPFR must exactly represent the target floating point
12927 format, which only happens when the target base equals two. */
12928 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12929 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12930 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12931 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12933 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12934 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12935 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12937 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12939 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12940 const int prec = fmt->p;
12941 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12942 int inexact;
12943 mpfr_t m1, m2, m3;
12945 mpfr_inits2 (prec, m1, m2, m3, NULL);
12946 mpfr_from_real (m1, ra1, GMP_RNDN);
12947 mpfr_from_real (m2, ra2, GMP_RNDN);
12948 mpfr_from_real (m3, ra3, GMP_RNDN);
12949 mpfr_clear_flags ();
12950 inexact = func (m1, m1, m2, m3, rnd);
12951 result = do_mpfr_ckconv (m1, type, inexact);
12952 mpfr_clears (m1, m2, m3, NULL);
12956 return result;
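/* Illustrative sketch (assumption, not in the original source): a
   hypothetical use of do_mpfr_arg3. mpfr_fma computes x*y+z with a single
   rounding, which is exactly the semantics fma requires. */

static tree
example_do_mpfr_arg3_use (tree arg0, tree arg1, tree arg2, tree type)
{
  return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
}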
12959 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12960 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12961 If ARG_SINP and ARG_COSP are NULL then the result is returned
12962 as a complex value.
12963 The type is taken from the type of ARG and is used for setting the
12964 precision of the calculation and results. */
12966 static tree
12967 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12969 tree const type = TREE_TYPE (arg);
12970 tree result = NULL_TREE;
12972 STRIP_NOPS (arg);
12974 /* To proceed, MPFR must exactly represent the target floating point
12975 format, which only happens when the target base equals two. */
12976 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12977 && TREE_CODE (arg) == REAL_CST
12978 && !TREE_OVERFLOW (arg))
12980 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12982 if (real_isfinite (ra))
12984 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12985 const int prec = fmt->p;
12986 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12987 tree result_s, result_c;
12988 int inexact;
12989 mpfr_t m, ms, mc;
12991 mpfr_inits2 (prec, m, ms, mc, NULL);
12992 mpfr_from_real (m, ra, GMP_RNDN);
12993 mpfr_clear_flags ();
12994 inexact = mpfr_sin_cos (ms, mc, m, rnd);
12995 result_s = do_mpfr_ckconv (ms, type, inexact);
12996 result_c = do_mpfr_ckconv (mc, type, inexact);
12997 mpfr_clears (m, ms, mc, NULL);
12998 if (result_s && result_c)
13000 /* If we are to return the result as a complex value, do so. */
13001 if (!arg_sinp && !arg_cosp)
13002 return build_complex (build_complex_type (type),
13003 result_c, result_s);
13005 /* Dereference the sin/cos pointer arguments. */
13006 arg_sinp = build_fold_indirect_ref (arg_sinp);
13007 arg_cosp = build_fold_indirect_ref (arg_cosp);
13008 /* Proceed iff valid pointer types were passed in. */
13009 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13010 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13012 /* Set the values. */
13013 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13014 result_s);
13015 TREE_SIDE_EFFECTS (result_s) = 1;
13016 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13017 result_c);
13018 TREE_SIDE_EFFECTS (result_c) = 1;
13019 /* Combine the assignments into a compound expr. */
13020 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13021 result_s, result_c));
13026 return result;
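/* Illustrative sketch (assumption, not in the original source): the two ways
   a caller might use do_mpfr_sincos. With null pointer arguments the folded
   value comes back as a COMPLEX_CST of cos (c) + i*sin (c), the form cexpi
   wants; with pointer arguments the result is a compound expression storing
   through *SINP and *COSP, the form sincos wants. The helper name is
   hypothetical. */

static tree
example_do_mpfr_sincos_uses (tree arg, tree sinp, tree cosp, bool as_cexpi)
{
  if (as_cexpi)
    return do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);
  return do_mpfr_sincos (arg, sinp, cosp);
}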
13029 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13030 two-argument mpfr order N Bessel function FUNC on them and return
13031 the resulting value as a tree with type TYPE. The mpfr precision
13032 is set to the precision of TYPE. We assume that function FUNC
13033 returns zero if the result could be calculated exactly within the
13034 requested precision. */
13035 static tree
13036 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13037 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13038 const REAL_VALUE_TYPE *min, bool inclusive)
13040 tree result = NULL_TREE;
13042 STRIP_NOPS (arg1);
13043 STRIP_NOPS (arg2);
13045 /* To proceed, MPFR must exactly represent the target floating point
13046 format, which only happens when the target base equals two. */
13047 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13048 && host_integerp (arg1, 0)
13049 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13051 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
13052 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13054 if (n == (long)n
13055 && real_isfinite (ra)
13056 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
13058 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13059 const int prec = fmt->p;
13060 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13061 int inexact;
13062 mpfr_t m;
13064 mpfr_init2 (m, prec);
13065 mpfr_from_real (m, ra, GMP_RNDN);
13066 mpfr_clear_flags ();
13067 inexact = func (m, n, m, rnd);
13068 result = do_mpfr_ckconv (m, type, inexact);
13069 mpfr_clear (m);
13073 return result;
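/* Illustrative sketch (assumption, not in the original source): a
   hypothetical use of do_mpfr_bessel_n. jn accepts any finite argument, so
   no lower bound is passed; a yn folder would instead pass &dconst0 with
   INCLUSIVE false, since yn requires a strictly positive argument. */

static tree
example_do_mpfr_bessel_n_use (tree n_arg, tree x_arg, tree type)
{
  return do_mpfr_bessel_n (n_arg, x_arg, type, mpfr_jn, NULL, false);
}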
13076 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13077 the pointer *(ARG_QUO) and return the result. The type is taken
13078 from the type of ARG0 and is used for setting the precision of the
13079 calculation and results. */
13081 static tree
13082 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13084 tree const type = TREE_TYPE (arg0);
13085 tree result = NULL_TREE;
13087 STRIP_NOPS (arg0);
13088 STRIP_NOPS (arg1);
13090 /* To proceed, MPFR must exactly represent the target floating point
13091 format, which only happens when the target base equals two. */
13092 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13093 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13094 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13096 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13097 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13099 if (real_isfinite (ra0) && real_isfinite (ra1))
13101 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13102 const int prec = fmt->p;
13103 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13104 tree result_rem;
13105 long integer_quo;
13106 mpfr_t m0, m1;
13108 mpfr_inits2 (prec, m0, m1, NULL);
13109 mpfr_from_real (m0, ra0, GMP_RNDN);
13110 mpfr_from_real (m1, ra1, GMP_RNDN);
13111 mpfr_clear_flags ();
13112 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13113 /* Remquo is independent of the rounding mode, so pass
13114 inexact=0 to do_mpfr_ckconv(). */
13115 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13116 mpfr_clears (m0, m1, NULL);
13117 if (result_rem)
13119 /* MPFR calculates quo in the host's long, so it may
13120 return more bits in quo than the target int can hold
13121 if sizeof (host long) > sizeof (target int). This can
13122 happen even for native compilers in LP64 mode. In
13123 these cases, reduce the quo value modulo the largest
13124 number that the target int can hold, leaving one bit
13125 for the sign. */
13126 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13127 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13129 /* Dereference the quo pointer argument. */
13130 arg_quo = build_fold_indirect_ref (arg_quo);
13131 /* Proceed iff a valid pointer type was passed in. */
13132 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13134 /* Set the value. */
13135 tree result_quo
13136 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13137 build_int_cst (TREE_TYPE (arg_quo),
13138 integer_quo));
13139 TREE_SIDE_EFFECTS (result_quo) = 1;
13140 /* Combine the quo assignment with the rem. */
13141 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13142 result_quo, result_rem));
13147 return result;
13150 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13151 resulting value as a tree with type TYPE. The mpfr precision is
13152 set to the precision of TYPE. We assume that this mpfr function
13153 returns zero if the result could be calculated exactly within the
13154 requested precision. In addition, the integer pointer represented
13155 by ARG_SG will be dereferenced and set to the appropriate signgam
13156 (-1,1) value. */
13158 static tree
13159 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13161 tree result = NULL_TREE;
13163 STRIP_NOPS (arg);
13165 /* To proceed, MPFR must exactly represent the target floating point
13166 format, which only happens when the target base equals two. Also
13167 verify ARG is a constant and that ARG_SG is an int pointer. */
13168 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13169 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13170 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13171 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13173 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13175 /* In addition to NaN and Inf, the argument cannot be zero or a
13176 negative integer. */
13177 if (real_isfinite (ra)
13178 && ra->cl != rvc_zero
13179 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
13181 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13182 const int prec = fmt->p;
13183 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13184 int inexact, sg;
13185 mpfr_t m;
13186 tree result_lg;
13188 mpfr_init2 (m, prec);
13189 mpfr_from_real (m, ra, GMP_RNDN);
13190 mpfr_clear_flags ();
13191 inexact = mpfr_lgamma (m, &sg, m, rnd);
13192 result_lg = do_mpfr_ckconv (m, type, inexact);
13193 mpfr_clear (m);
13194 if (result_lg)
13196 tree result_sg;
13198 /* Dereference the arg_sg pointer argument. */
13199 arg_sg = build_fold_indirect_ref (arg_sg);
13200 /* Assign the signgam value into *arg_sg. */
13201 result_sg = fold_build2 (MODIFY_EXPR,
13202 TREE_TYPE (arg_sg), arg_sg,
13203 build_int_cst (TREE_TYPE (arg_sg), sg));
13204 TREE_SIDE_EFFECTS (result_sg) = 1;
13205 /* Combine the signgam assignment with the lgamma result. */
13206 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13207 result_sg, result_lg));
13212 return result;
13215 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13216 function FUNC on it and return the resulting value as a tree with
13217 type TYPE. The mpfr precision is set to the precision of TYPE. We
13218 assume that function FUNC returns zero if the result could be
13219 calculated exactly within the requested precision. */
13221 static tree
13222 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13224 tree result = NULL_TREE;
13226 STRIP_NOPS (arg);
13228 /* To proceed, MPFR must exactly represent the target floating point
13229 format, which only happens when the target base equals two. */
13230 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13231 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13232 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13234 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13235 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13237 if (real_isfinite (re) && real_isfinite (im))
13239 const struct real_format *const fmt =
13240 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13241 const int prec = fmt->p;
13242 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13243 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13244 int inexact;
13245 mpc_t m;
13247 mpc_init2 (m, prec);
13248 mpfr_from_real (mpc_realref(m), re, rnd);
13249 mpfr_from_real (mpc_imagref(m), im, rnd);
13250 mpfr_clear_flags ();
13251 inexact = func (m, m, crnd);
13252 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13253 mpc_clear (m);
13257 return result;
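/* Illustrative sketch (assumption, not in the original source): a
   hypothetical use of do_mpc_arg1, folding ccos of a constant complex
   argument. Any one-operand mpc function with this signature, e.g. mpc_exp
   or mpc_sqrt, could be passed the same way. */

static tree
example_do_mpc_arg1_use (tree arg, tree type)
{
  return do_mpc_arg1 (arg, type, mpc_cos);
}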
13260 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
13261 mpc function FUNC on them and return the resulting value as a tree
13262 with type TYPE. The mpfr precision is set to the precision of
13263 TYPE. We assume that function FUNC returns zero if the result
13264 could be calculated exactly within the requested precision. If
13265 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13266 in the arguments and/or results. */
13268 tree
13269 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13270 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13272 tree result = NULL_TREE;
13274 STRIP_NOPS (arg0);
13275 STRIP_NOPS (arg1);
13277 /* To proceed, MPFR must exactly represent the target floating point
13278 format, which only happens when the target base equals two. */
13279 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13280 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13281 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13282 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13283 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13285 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13286 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13287 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13288 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13290 if (do_nonfinite
13291 || (real_isfinite (re0) && real_isfinite (im0)
13292 && real_isfinite (re1) && real_isfinite (im1)))
13294 const struct real_format *const fmt =
13295 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13296 const int prec = fmt->p;
13297 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13298 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13299 int inexact;
13300 mpc_t m0, m1;
13302 mpc_init2 (m0, prec);
13303 mpc_init2 (m1, prec);
13304 mpfr_from_real (mpc_realref(m0), re0, rnd);
13305 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13306 mpfr_from_real (mpc_realref(m1), re1, rnd);
13307 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13308 mpfr_clear_flags ();
13309 inexact = func (m0, m0, m1, crnd);
13310 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13311 mpc_clear (m0);
13312 mpc_clear (m1);
13316 return result;
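/* Illustrative sketch (assumption, not in the original source): a
   hypothetical use of do_mpc_arg2, folding cpow of two constant complex
   arguments. DO_NONFINITE is left off here so that NaN/Inf operands and
   results are not folded. */

static tree
example_do_mpc_arg2_use (tree arg0, tree arg1, tree type)
{
  return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
}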
13319 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13320 a normal call should be emitted rather than expanding the function
13321 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13323 static tree
13324 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13326 int nargs = gimple_call_num_args (stmt);
13328 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
13329 (nargs > 0
13330 ? gimple_call_arg_ptr (stmt, 0)
13331 : &error_mark_node), fcode);
13334 /* Fold a call STMT to __{,v}snprintf_chk. Return NULL_TREE if
13335 a normal call should be emitted rather than expanding the function
13336 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13337 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum
13338 length passed as the second argument. */
13340 tree
13341 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13342 enum built_in_function fcode)
13344 int nargs = gimple_call_num_args (stmt);
13346 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
13347 (nargs > 0
13348 ? gimple_call_arg_ptr (stmt, 0)
13349 : &error_mark_node), maxlen, fcode);
13352 /* Builtins with folding operations that operate on "..." arguments
13353 need special handling; we need to store the arguments in a convenient
13354 data structure before attempting any folding. Fortunately there are
13355 only a few builtins that fall into this category. FNDECL is the
13356 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13357 result of the function call is ignored. */
13359 static tree
13360 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13361 bool ignore ATTRIBUTE_UNUSED)
13363 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13364 tree ret = NULL_TREE;
13366 switch (fcode)
13368 case BUILT_IN_SPRINTF_CHK:
13369 case BUILT_IN_VSPRINTF_CHK:
13370 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13371 break;
13373 case BUILT_IN_SNPRINTF_CHK:
13374 case BUILT_IN_VSNPRINTF_CHK:
13375 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13377 default:
13378 break;
13380 if (ret)
13382 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13383 TREE_NO_WARNING (ret) = 1;
13384 return ret;
13386 return NULL_TREE;
13389 /* A wrapper function for builtin folding that prevents warnings for
13390 "statement without effect" and the like, caused by removing the
13391 call node earlier than the warning is generated. */
13393 tree
13394 fold_call_stmt (gimple stmt, bool ignore)
13396 tree ret = NULL_TREE;
13397 tree fndecl = gimple_call_fndecl (stmt);
13398 location_t loc = gimple_location (stmt);
13399 if (fndecl
13400 && TREE_CODE (fndecl) == FUNCTION_DECL
13401 && DECL_BUILT_IN (fndecl)
13402 && !gimple_call_va_arg_pack_p (stmt))
13404 int nargs = gimple_call_num_args (stmt);
13405 tree *args = (nargs > 0
13406 ? gimple_call_arg_ptr (stmt, 0)
13407 : &error_mark_node);
13409 if (avoid_folding_inline_builtin (fndecl))
13410 return NULL_TREE;
13411 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13413 return targetm.fold_builtin (fndecl, nargs, args, ignore);
13415 else
13417 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13418 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13419 if (!ret)
13420 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13421 if (ret)
13423 /* Propagate location information from original call to
13424 expansion of builtin. Otherwise things like
13425 maybe_emit_chk_warning, that operate on the expansion
13426 of a builtin, will use the wrong location information. */
13427 if (gimple_has_location (stmt))
13429 tree realret = ret;
13430 if (TREE_CODE (ret) == NOP_EXPR)
13431 realret = TREE_OPERAND (ret, 0);
13432 if (CAN_HAVE_LOCATION_P (realret)
13433 && !EXPR_HAS_LOCATION (realret))
13434 SET_EXPR_LOCATION (realret, loc);
13435 return realret;
13437 return ret;
13441 return NULL_TREE;
13444 /* Look up the function in built_in_decls that corresponds to DECL
13445 and set ASMSPEC as its user assembler name. DECL must be a
13446 function decl that declares a builtin. */
13448 void
13449 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13451 tree builtin;
13452 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13453 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13454 && asmspec != 0);
13456 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13457 set_user_assembler_name (builtin, asmspec);
13458 switch (DECL_FUNCTION_CODE (decl))
13460 case BUILT_IN_MEMCPY:
13461 init_block_move_fn (asmspec);
13462 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13463 break;
13464 case BUILT_IN_MEMSET:
13465 init_block_clear_fn (asmspec);
13466 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13467 break;
13468 case BUILT_IN_MEMMOVE:
13469 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13470 break;
13471 case BUILT_IN_MEMCMP:
13472 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13473 break;
13474 case BUILT_IN_ABORT:
13475 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13476 break;
13477 case BUILT_IN_FFS:
13478 if (INT_TYPE_SIZE < BITS_PER_WORD)
13480 set_user_assembler_libfunc ("ffs", asmspec);
13481 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
13482 MODE_INT, 0), "ffs");
13484 break;
13485 default:
13486 break;
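/* Illustrative sketch (assumption, not in the original source): the
   user-level construct that ends up here is an assembler rename of a
   builtin declaration, e.g.

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   where "my_memcpy" is a hypothetical replacement name. After the switch
   above, the block-move expansion and the memcpy libfunc that the compiler
   emits internally also reference my_memcpy rather than memcpy. */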
13490 /* Return true if DECL is a builtin that expands to a constant or similarly
13491 simple code. */
13492 bool
13493 is_simple_builtin (tree decl)
13495 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13496 switch (DECL_FUNCTION_CODE (decl))
13498 /* Builtins that expand to constants. */
13499 case BUILT_IN_CONSTANT_P:
13500 case BUILT_IN_EXPECT:
13501 case BUILT_IN_OBJECT_SIZE:
13502 case BUILT_IN_UNREACHABLE:
13503 /* Simple register moves or loads from stack. */
13504 case BUILT_IN_ASSUME_ALIGNED:
13505 case BUILT_IN_RETURN_ADDRESS:
13506 case BUILT_IN_EXTRACT_RETURN_ADDR:
13507 case BUILT_IN_FROB_RETURN_ADDR:
13508 case BUILT_IN_RETURN:
13509 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
13510 case BUILT_IN_FRAME_ADDRESS:
13511 case BUILT_IN_VA_END:
13512 case BUILT_IN_STACK_SAVE:
13513 case BUILT_IN_STACK_RESTORE:
13514 /* Exception state returns or moves registers around. */
13515 case BUILT_IN_EH_FILTER:
13516 case BUILT_IN_EH_POINTER:
13517 case BUILT_IN_EH_COPY_VALUES:
13518 return true;
13520 default:
13521 return false;
13524 return false;
13527 /* Return true if DECL is a builtin that is not expensive, i.e., it will
13528 most probably be expanded inline into reasonably simple code. This is
13529 a superset of is_simple_builtin. */
13530 bool
13531 is_inexpensive_builtin (tree decl)
13533 if (!decl)
13534 return false;
13535 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
13536 return true;
13537 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
13538 switch (DECL_FUNCTION_CODE (decl))
13540 case BUILT_IN_ABS:
13541 case BUILT_IN_ALLOCA:
13542 case BUILT_IN_BSWAP32:
13543 case BUILT_IN_BSWAP64:
13544 case BUILT_IN_CLZ:
13545 case BUILT_IN_CLZIMAX:
13546 case BUILT_IN_CLZL:
13547 case BUILT_IN_CLZLL:
13548 case BUILT_IN_CTZ:
13549 case BUILT_IN_CTZIMAX:
13550 case BUILT_IN_CTZL:
13551 case BUILT_IN_CTZLL:
13552 case BUILT_IN_FFS:
13553 case BUILT_IN_FFSIMAX:
13554 case BUILT_IN_FFSL:
13555 case BUILT_IN_FFSLL:
13556 case BUILT_IN_IMAXABS:
13557 case BUILT_IN_FINITE:
13558 case BUILT_IN_FINITEF:
13559 case BUILT_IN_FINITEL:
13560 case BUILT_IN_FINITED32:
13561 case BUILT_IN_FINITED64:
13562 case BUILT_IN_FINITED128:
13563 case BUILT_IN_FPCLASSIFY:
13564 case BUILT_IN_ISFINITE:
13565 case BUILT_IN_ISINF_SIGN:
13566 case BUILT_IN_ISINF:
13567 case BUILT_IN_ISINFF:
13568 case BUILT_IN_ISINFL:
13569 case BUILT_IN_ISINFD32:
13570 case BUILT_IN_ISINFD64:
13571 case BUILT_IN_ISINFD128:
13572 case BUILT_IN_ISNAN:
13573 case BUILT_IN_ISNANF:
13574 case BUILT_IN_ISNANL:
13575 case BUILT_IN_ISNAND32:
13576 case BUILT_IN_ISNAND64:
13577 case BUILT_IN_ISNAND128:
13578 case BUILT_IN_ISNORMAL:
13579 case BUILT_IN_ISGREATER:
13580 case BUILT_IN_ISGREATEREQUAL:
13581 case BUILT_IN_ISLESS:
13582 case BUILT_IN_ISLESSEQUAL:
13583 case BUILT_IN_ISLESSGREATER:
13584 case BUILT_IN_ISUNORDERED:
13585 case BUILT_IN_VA_ARG_PACK:
13586 case BUILT_IN_VA_ARG_PACK_LEN:
13587 case BUILT_IN_VA_COPY:
13588 case BUILT_IN_TRAP:
13589 case BUILT_IN_SAVEREGS:
13590 case BUILT_IN_POPCOUNTL:
13591 case BUILT_IN_POPCOUNTLL:
13592 case BUILT_IN_POPCOUNTIMAX:
13593 case BUILT_IN_POPCOUNT:
13594 case BUILT_IN_PARITYL:
13595 case BUILT_IN_PARITYLL:
13596 case BUILT_IN_PARITYIMAX:
13597 case BUILT_IN_PARITY:
13598 case BUILT_IN_LABS:
13599 case BUILT_IN_LLABS:
13600 case BUILT_IN_PREFETCH:
13601 return true;
13603 default:
13604 return is_simple_builtin (decl);
13607 return false;