Fix DealII type problems.
[official-gcc/Ramakrishna.git] / gcc / builtins.c
blobdf38f5ecc6e37010468e077a88206739f092fd11
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef SLOW_UNALIGNED_ACCESS
55 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
56 #endif
58 #ifndef PAD_VARARGS_DOWN
59 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
60 #endif
61 #ifdef HAVE_mpc
62 static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
63 #endif
65 /* Define the names of the builtin function types and codes. */
66 const char *const built_in_class_names[4]
67 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
69 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
70 const char * built_in_names[(int) END_BUILTINS] =
72 #include "builtins.def"
74 #undef DEF_BUILTIN
76 /* Setup an array of _DECL trees, make sure each element is
77 initialized to NULL_TREE. */
78 tree built_in_decls[(int) END_BUILTINS];
79 /* Declarations used when constructing the builtin implicitly in the compiler.
80 It may be NULL_TREE when this is invalid (for instance runtime is not
81 required to implement the function call in all cases). */
82 tree implicit_built_in_decls[(int) END_BUILTINS];
84 static const char *c_getstr (tree);
85 static rtx c_readstr (const char *, enum machine_mode);
86 static int target_char_cast (tree, char *);
87 static rtx get_memory_rtx (tree, tree);
88 static int apply_args_size (void);
89 static int apply_result_size (void);
90 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
91 static rtx result_vector (int, rtx);
92 #endif
93 static void expand_builtin_update_setjmp_buf (rtx);
94 static void expand_builtin_prefetch (tree);
95 static rtx expand_builtin_apply_args (void);
96 static rtx expand_builtin_apply_args_1 (void);
97 static rtx expand_builtin_apply (rtx, rtx, rtx);
98 static void expand_builtin_return (rtx);
99 static enum type_class type_to_class (tree);
100 static rtx expand_builtin_classify_type (tree);
101 static void expand_errno_check (tree, rtx);
102 static rtx expand_builtin_mathfn (tree, rtx, rtx);
103 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
106 static rtx expand_builtin_sincos (tree);
107 static rtx expand_builtin_cexpi (tree, rtx, rtx);
108 static rtx expand_builtin_int_roundingfn (tree, rtx);
109 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
110 static rtx expand_builtin_args_info (tree);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcmp (tree, rtx);
117 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
118 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
119 static rtx expand_builtin_memcpy (tree, rtx);
120 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
121 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
122 enum machine_mode, int);
123 static rtx expand_builtin_strcpy (tree, rtx);
124 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
125 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strncpy (tree, rtx);
127 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
128 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
129 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
130 static rtx expand_builtin_bzero (tree);
131 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
132 static rtx expand_builtin_alloca (tree, rtx);
133 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
134 static rtx expand_builtin_frame_address (tree, tree);
135 static tree stabilize_va_list_loc (location_t, tree, int);
136 static rtx expand_builtin_expect (tree, rtx);
137 static tree fold_builtin_constant_p (tree);
138 static tree fold_builtin_expect (location_t, tree, tree);
139 static tree fold_builtin_classify_type (tree);
140 static tree fold_builtin_strlen (location_t, tree);
141 static tree fold_builtin_inf (location_t, tree, int);
142 static tree fold_builtin_nan (tree, tree, int);
143 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
144 static bool validate_arg (const_tree, enum tree_code code);
145 static bool integer_valued_real_p (tree);
146 static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
147 static bool readonly_data_expr (tree);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_sqrt (location_t, tree, tree);
151 static tree fold_builtin_cbrt (location_t, tree, tree);
152 static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
153 static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
154 static tree fold_builtin_cos (location_t, tree, tree, tree);
155 static tree fold_builtin_cosh (location_t, tree, tree, tree);
156 static tree fold_builtin_tan (tree, tree);
157 static tree fold_builtin_trunc (location_t, tree, tree);
158 static tree fold_builtin_floor (location_t, tree, tree);
159 static tree fold_builtin_ceil (location_t, tree, tree);
160 static tree fold_builtin_round (location_t, tree, tree);
161 static tree fold_builtin_int_roundingfn (location_t, tree, tree);
162 static tree fold_builtin_bitop (tree, tree);
163 static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree, bool, int);
164 static tree fold_builtin_strchr (location_t, tree, tree, tree);
165 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
167 static tree fold_builtin_strcmp (location_t, tree, tree);
168 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
169 static tree fold_builtin_signbit (location_t, tree, tree);
170 static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_isascii (location_t, tree);
172 static tree fold_builtin_toascii (location_t, tree);
173 static tree fold_builtin_isdigit (location_t, tree);
174 static tree fold_builtin_fabs (location_t, tree, tree);
175 static tree fold_builtin_abs (location_t, tree, tree);
176 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
177 enum tree_code);
178 static tree fold_builtin_n (location_t, tree, tree *, int, bool);
179 static tree fold_builtin_0 (location_t, tree, bool);
180 static tree fold_builtin_1 (location_t, tree, tree, bool);
181 static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
182 static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
183 static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
184 static tree fold_builtin_varargs (location_t, tree, tree, bool);
186 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
187 static tree fold_builtin_strstr (location_t, tree, tree, tree);
188 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
189 static tree fold_builtin_strcat (location_t, tree, tree);
190 static tree fold_builtin_strncat (location_t, tree, tree, tree);
191 static tree fold_builtin_strspn (location_t, tree, tree);
192 static tree fold_builtin_strcspn (location_t, tree, tree);
193 static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
195 static rtx expand_builtin_object_size (tree);
196 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
197 enum built_in_function);
198 static void maybe_emit_chk_warning (tree, enum built_in_function);
199 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
200 static void maybe_emit_free_warning (tree);
201 static tree fold_builtin_object_size (tree, tree);
202 static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
203 static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
204 static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
205 static tree fold_builtin_printf (location_t, tree, tree, tree, bool, enum built_in_function);
206 static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
207 enum built_in_function);
208 static bool init_target_chars (void);
210 static unsigned HOST_WIDE_INT target_newline;
211 static unsigned HOST_WIDE_INT target_percent;
212 static unsigned HOST_WIDE_INT target_c;
213 static unsigned HOST_WIDE_INT target_s;
214 static char target_percent_c[3];
215 static char target_percent_s[3];
216 static char target_percent_s_newline[4];
217 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
218 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
219 static tree do_mpfr_arg2 (tree, tree, tree,
220 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
221 static tree do_mpfr_arg3 (tree, tree, tree, tree,
222 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
223 static tree do_mpfr_sincos (tree, tree, tree);
224 static tree do_mpfr_bessel_n (tree, tree, tree,
225 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
226 const REAL_VALUE_TYPE *, bool);
227 static tree do_mpfr_remquo (tree, tree, tree);
228 static tree do_mpfr_lgamma_r (tree, tree, tree);
/* Return true if NAME starts with one of the reserved built-in
   prefixes "__builtin_" or "__sync_".  */

bool
is_builtin_name (const char *name)
{
  /* A name denotes a built-in iff it carries one of the two reserved
     prefixes; anything else is an ordinary identifier.  */
  return (strncmp (name, "__builtin_", 10) == 0
          || strncmp (name, "__sync_", 7) == 0);
}
243 /* Return true if DECL is a function symbol representing a built-in. */
245 bool
246 is_builtin_fn (tree decl)
248 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252 /* Return true if NODE should be considered for inline expansion regardless
253 of the optimization level. This means whenever a function is invoked with
254 its "internal" name, which normally contains the prefix "__builtin". */
256 static bool
257 called_as_built_in (tree node)
259 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
260 we want the name used to call the function, not the name it
261 will have. */
262 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
263 return is_builtin_name (name);
266 /* Return the alignment in bits of EXP, an object.
267 Don't return more than MAX_ALIGN no matter what, ALIGN is the inital
268 guessed alignment e.g. from type alignment. */
271 get_object_alignment (tree exp, unsigned int align, unsigned int max_align)
273 unsigned int inner;
275 inner = max_align;
276 if (handled_component_p (exp))
278 HOST_WIDE_INT bitsize, bitpos;
279 tree offset;
280 enum machine_mode mode;
281 int unsignedp, volatilep;
283 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
284 &mode, &unsignedp, &volatilep, true);
285 if (bitpos)
286 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
287 while (offset)
289 tree next_offset;
291 if (TREE_CODE (offset) == PLUS_EXPR)
293 next_offset = TREE_OPERAND (offset, 0);
294 offset = TREE_OPERAND (offset, 1);
296 else
297 next_offset = NULL;
298 if (host_integerp (offset, 1))
300 /* Any overflow in calculating offset_bits won't change
301 the alignment. */
302 unsigned offset_bits
303 = ((unsigned) tree_low_cst (offset, 1) * BITS_PER_UNIT);
305 if (offset_bits)
306 inner = MIN (inner, (offset_bits & -offset_bits));
308 else if (TREE_CODE (offset) == MULT_EXPR
309 && host_integerp (TREE_OPERAND (offset, 1), 1))
311 /* Any overflow in calculating offset_factor won't change
312 the alignment. */
313 unsigned offset_factor
314 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
315 * BITS_PER_UNIT);
317 if (offset_factor)
318 inner = MIN (inner, (offset_factor & -offset_factor));
320 else
322 inner = MIN (inner, BITS_PER_UNIT);
323 break;
325 offset = next_offset;
328 if (DECL_P (exp))
329 align = MIN (inner, DECL_ALIGN (exp));
330 #ifdef CONSTANT_ALIGNMENT
331 else if (CONSTANT_CLASS_P (exp))
332 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
333 #endif
334 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
335 || TREE_CODE (exp) == INDIRECT_REF)
336 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
337 else
338 align = MIN (align, inner);
339 return MIN (align, max_align);
342 /* Returns true iff we can trust that alignment information has been
343 calculated properly. */
345 bool
346 can_trust_pointer_alignment (void)
348 /* We rely on TER to compute accurate alignment information. */
349 return (optimize && flag_tree_ter);
352 /* Return the alignment in bits of EXP, a pointer valued expression.
353 But don't return more than MAX_ALIGN no matter what.
354 The alignment returned is, by default, the alignment of the thing that
355 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
357 Otherwise, look at the expression to see if we can do better, i.e., if the
358 expression is actually pointing at an object whose alignment is tighter. */
361 get_pointer_alignment (tree exp, unsigned int max_align)
363 unsigned int align, inner;
365 if (!can_trust_pointer_alignment ())
366 return 0;
368 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
369 return 0;
371 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
372 align = MIN (align, max_align);
374 while (1)
376 switch (TREE_CODE (exp))
378 CASE_CONVERT:
379 exp = TREE_OPERAND (exp, 0);
380 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
381 return align;
383 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
384 align = MIN (inner, max_align);
385 break;
387 case POINTER_PLUS_EXPR:
388 /* If sum of pointer + int, restrict our maximum alignment to that
389 imposed by the integer. If not, we can't do any better than
390 ALIGN. */
391 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
392 return align;
394 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
395 & (max_align / BITS_PER_UNIT - 1))
396 != 0)
397 max_align >>= 1;
399 exp = TREE_OPERAND (exp, 0);
400 break;
402 case ADDR_EXPR:
403 /* See what we are pointing at and look at its alignment. */
404 return get_object_alignment (TREE_OPERAND (exp, 0), align, max_align);
406 default:
407 return align;
412 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
413 way, because it could contain a zero byte in the middle.
414 TREE_STRING_LENGTH is the size of the character array, not the string.
416 ONLY_VALUE should be nonzero if the result is not going to be emitted
417 into the instruction stream and zero if it is going to be expanded.
418 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
419 is returned, otherwise NULL, since
420 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
421 evaluate the side-effects.
423 The value returned is of type `ssizetype'.
425 Unfortunately, string_constant can't access the values of const char
426 arrays with initializers, so neither can we do so here. */
428 tree
429 c_strlen (tree src, int only_value)
431 tree offset_node;
432 HOST_WIDE_INT offset;
433 int max;
434 const char *ptr;
436 STRIP_NOPS (src);
437 if (TREE_CODE (src) == COND_EXPR
438 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
440 tree len1, len2;
442 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
443 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
444 if (tree_int_cst_equal (len1, len2))
445 return len1;
448 if (TREE_CODE (src) == COMPOUND_EXPR
449 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
450 return c_strlen (TREE_OPERAND (src, 1), only_value);
452 src = string_constant (src, &offset_node);
453 if (src == 0)
454 return NULL_TREE;
456 max = TREE_STRING_LENGTH (src) - 1;
457 ptr = TREE_STRING_POINTER (src);
459 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
461 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
462 compute the offset to the following null if we don't know where to
463 start searching for it. */
464 int i;
466 for (i = 0; i < max; i++)
467 if (ptr[i] == 0)
468 return NULL_TREE;
470 /* We don't know the starting offset, but we do know that the string
471 has no internal zero bytes. We can assume that the offset falls
472 within the bounds of the string; otherwise, the programmer deserves
473 what he gets. Subtract the offset from the length of the string,
474 and return that. This would perhaps not be valid if we were dealing
475 with named arrays in addition to literal string constants. */
477 return size_diffop_loc (input_location, size_int (max), offset_node);
480 /* We have a known offset into the string. Start searching there for
481 a null character if we can represent it as a single HOST_WIDE_INT. */
482 if (offset_node == 0)
483 offset = 0;
484 else if (! host_integerp (offset_node, 0))
485 offset = -1;
486 else
487 offset = tree_low_cst (offset_node, 0);
489 /* If the offset is known to be out of bounds, warn, and call strlen at
490 runtime. */
491 if (offset < 0 || offset > max)
493 /* Suppress multiple warnings for propagated constant strings. */
494 if (! TREE_NO_WARNING (src))
496 warning (0, "offset outside bounds of constant string");
497 TREE_NO_WARNING (src) = 1;
499 return NULL_TREE;
502 /* Use strlen to search for the first zero byte. Since any strings
503 constructed with build_string will have nulls appended, we win even
504 if we get handed something like (char[4])"abcd".
506 Since OFFSET is our starting index into the string, no further
507 calculation is needed. */
508 return ssize_int (strlen (ptr + offset));
511 /* Return a char pointer for a C string if it is a string constant
512 or sum of string constant and integer constant. */
514 static const char *
515 c_getstr (tree src)
517 tree offset_node;
519 src = string_constant (src, &offset_node);
520 if (src == 0)
521 return 0;
523 if (offset_node == 0)
524 return TREE_STRING_POINTER (src);
525 else if (!host_integerp (offset_node, 1)
526 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
527 return 0;
529 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
532 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
533 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
535 static rtx
536 c_readstr (const char *str, enum machine_mode mode)
538 HOST_WIDE_INT c[2];
539 HOST_WIDE_INT ch;
540 unsigned int i, j;
542 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
544 c[0] = 0;
545 c[1] = 0;
546 ch = 1;
547 for (i = 0; i < GET_MODE_SIZE (mode); i++)
549 j = i;
550 if (WORDS_BIG_ENDIAN)
551 j = GET_MODE_SIZE (mode) - i - 1;
552 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
553 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
554 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
555 j *= BITS_PER_UNIT;
556 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
558 if (ch)
559 ch = (unsigned char) str[i];
560 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
562 return immed_double_const (c[0], c[1], mode);
565 /* Cast a target constant CST to target CHAR and if that value fits into
566 host char type, return zero and put that value into variable pointed to by
567 P. */
569 static int
570 target_char_cast (tree cst, char *p)
572 unsigned HOST_WIDE_INT val, hostval;
574 if (!host_integerp (cst, 1)
575 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
576 return 1;
578 val = tree_low_cst (cst, 1);
579 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
580 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
582 hostval = val;
583 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
584 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
586 if (val != hostval)
587 return 1;
589 *p = hostval;
590 return 0;
593 /* Similar to save_expr, but assumes that arbitrary code is not executed
594 in between the multiple evaluations. In particular, we assume that a
595 non-addressable local variable will not be modified. */
597 static tree
598 builtin_save_expr (tree exp)
600 if (TREE_ADDRESSABLE (exp) == 0
601 && (TREE_CODE (exp) == PARM_DECL
602 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
603 return exp;
605 return save_expr (exp);
608 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
609 times to get the address of either a higher stack frame, or a return
610 address located within it (depending on FNDECL_CODE). */
612 static rtx
613 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
615 int i;
617 #ifdef INITIAL_FRAME_ADDRESS_RTX
618 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
619 #else
620 rtx tem;
622 /* For a zero count with __builtin_return_address, we don't care what
623 frame address we return, because target-specific definitions will
624 override us. Therefore frame pointer elimination is OK, and using
625 the soft frame pointer is OK.
627 For a nonzero count, or a zero count with __builtin_frame_address,
628 we require a stable offset from the current frame pointer to the
629 previous one, so we must use the hard frame pointer, and
630 we must disable frame pointer elimination. */
631 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
632 tem = frame_pointer_rtx;
633 else
635 tem = hard_frame_pointer_rtx;
637 /* Tell reload not to eliminate the frame pointer. */
638 crtl->accesses_prior_frames = 1;
640 #endif
642 /* Some machines need special handling before we can access
643 arbitrary frames. For example, on the SPARC, we must first flush
644 all register windows to the stack. */
645 #ifdef SETUP_FRAME_ADDRESSES
646 if (count > 0)
647 SETUP_FRAME_ADDRESSES ();
648 #endif
650 /* On the SPARC, the return address is not in the frame, it is in a
651 register. There is no way to access it off of the current frame
652 pointer, but it can be accessed off the previous frame pointer by
653 reading the value from the register window save area. */
654 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
655 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
656 count--;
657 #endif
659 /* Scan back COUNT frames to the specified frame. */
660 for (i = 0; i < count; i++)
662 /* Assume the dynamic chain pointer is in the word that the
663 frame address points to, unless otherwise specified. */
664 #ifdef DYNAMIC_CHAIN_ADDRESS
665 tem = DYNAMIC_CHAIN_ADDRESS (tem);
666 #endif
667 tem = memory_address (Pmode, tem);
668 tem = gen_frame_mem (Pmode, tem);
669 tem = copy_to_reg (tem);
672 /* For __builtin_frame_address, return what we've got. But, on
673 the SPARC for example, we may have to add a bias. */
674 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
675 #ifdef FRAME_ADDR_RTX
676 return FRAME_ADDR_RTX (tem);
677 #else
678 return tem;
679 #endif
681 /* For __builtin_return_address, get the return address from that frame. */
682 #ifdef RETURN_ADDR_RTX
683 tem = RETURN_ADDR_RTX (count, tem);
684 #else
685 tem = memory_address (Pmode,
686 plus_constant (tem, GET_MODE_SIZE (Pmode)));
687 tem = gen_frame_mem (Pmode, tem);
688 #endif
689 return tem;
692 /* Alias set used for setjmp buffer. */
693 static alias_set_type setjmp_alias_set = -1;
695 /* Construct the leading half of a __builtin_setjmp call. Control will
696 return to RECEIVER_LABEL. This is also called directly by the SJLJ
697 exception handling code. */
699 void
700 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
702 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
703 rtx stack_save;
704 rtx mem;
706 if (setjmp_alias_set == -1)
707 setjmp_alias_set = new_alias_set ();
709 buf_addr = convert_memory_address (Pmode, buf_addr);
711 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
713 /* We store the frame pointer and the address of receiver_label in
714 the buffer and use the rest of it for the stack save area, which
715 is machine-dependent. */
717 mem = gen_rtx_MEM (Pmode, buf_addr);
718 set_mem_alias_set (mem, setjmp_alias_set);
719 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
721 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
722 set_mem_alias_set (mem, setjmp_alias_set);
724 emit_move_insn (validize_mem (mem),
725 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
727 stack_save = gen_rtx_MEM (sa_mode,
728 plus_constant (buf_addr,
729 2 * GET_MODE_SIZE (Pmode)));
730 set_mem_alias_set (stack_save, setjmp_alias_set);
731 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
733 /* If there is further processing to do, do it. */
734 #ifdef HAVE_builtin_setjmp_setup
735 if (HAVE_builtin_setjmp_setup)
736 emit_insn (gen_builtin_setjmp_setup (buf_addr));
737 #endif
739 /* Tell optimize_save_area_alloca that extra work is going to
740 need to go on during alloca. */
741 cfun->calls_setjmp = 1;
743 /* We have a nonlocal label. */
744 cfun->has_nonlocal_label = 1;
747 /* Construct the trailing part of a __builtin_setjmp call. This is
748 also called directly by the SJLJ exception handling code. */
750 void
751 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
753 rtx chain;
755 /* Clobber the FP when we get here, so we have to make sure it's
756 marked as used by this function. */
757 emit_use (hard_frame_pointer_rtx);
759 /* Mark the static chain as clobbered here so life information
760 doesn't get messed up for it. */
761 chain = targetm.calls.static_chain (current_function_decl, true);
762 if (chain && REG_P (chain))
763 emit_clobber (chain);
765 /* Now put in the code to restore the frame pointer, and argument
766 pointer, if needed. */
767 #ifdef HAVE_nonlocal_goto
768 if (! HAVE_nonlocal_goto)
769 #endif
771 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
772 /* This might change the hard frame pointer in ways that aren't
773 apparent to early optimization passes, so force a clobber. */
774 emit_clobber (hard_frame_pointer_rtx);
777 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
778 if (fixed_regs[ARG_POINTER_REGNUM])
780 #ifdef ELIMINABLE_REGS
781 size_t i;
782 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
784 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
785 if (elim_regs[i].from == ARG_POINTER_REGNUM
786 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
787 break;
789 if (i == ARRAY_SIZE (elim_regs))
790 #endif
792 /* Now restore our arg pointer from the address at which it
793 was saved in our stack frame. */
794 emit_move_insn (crtl->args.internal_arg_pointer,
795 copy_to_reg (get_arg_pointer_save_area ()));
798 #endif
800 #ifdef HAVE_builtin_setjmp_receiver
801 if (HAVE_builtin_setjmp_receiver)
802 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
803 else
804 #endif
805 #ifdef HAVE_nonlocal_goto_receiver
806 if (HAVE_nonlocal_goto_receiver)
807 emit_insn (gen_nonlocal_goto_receiver ());
808 else
809 #endif
810 { /* Nothing */ }
812 /* We must not allow the code we just generated to be reordered by
813 scheduling. Specifically, the update of the frame pointer must
814 happen immediately, not later. */
815 emit_insn (gen_blockage ());
818 /* __builtin_longjmp is passed a pointer to an array of five words (not
819 all will be used on all machines). It operates similarly to the C
820 library function of the same name, but is more efficient. Much of
821 the code below is copied from the handling of non-local gotos. */
823 static void
824 expand_builtin_longjmp (rtx buf_addr, rtx value)
826 rtx fp, lab, stack, insn, last;
827 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
829 /* DRAP is needed for stack realign if longjmp is expanded to current
830 function */
831 if (SUPPORTS_STACK_ALIGNMENT)
832 crtl->need_drap = true;
834 if (setjmp_alias_set == -1)
835 setjmp_alias_set = new_alias_set ();
837 buf_addr = convert_memory_address (Pmode, buf_addr);
839 buf_addr = force_reg (Pmode, buf_addr);
841 /* We require that the user must pass a second argument of 1, because
842 that is what builtin_setjmp will return. */
843 gcc_assert (value == const1_rtx);
845 last = get_last_insn ();
846 #ifdef HAVE_builtin_longjmp
847 if (HAVE_builtin_longjmp)
848 emit_insn (gen_builtin_longjmp (buf_addr));
849 else
850 #endif
852 fp = gen_rtx_MEM (Pmode, buf_addr);
853 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
854 GET_MODE_SIZE (Pmode)));
856 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
857 2 * GET_MODE_SIZE (Pmode)));
858 set_mem_alias_set (fp, setjmp_alias_set);
859 set_mem_alias_set (lab, setjmp_alias_set);
860 set_mem_alias_set (stack, setjmp_alias_set);
862 /* Pick up FP, label, and SP from the block and jump. This code is
863 from expand_goto in stmt.c; see there for detailed comments. */
864 #ifdef HAVE_nonlocal_goto
865 if (HAVE_nonlocal_goto)
866 /* We have to pass a value to the nonlocal_goto pattern that will
867 get copied into the static_chain pointer, but it does not matter
868 what that value is, because builtin_setjmp does not use it. */
869 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
870 else
871 #endif
873 lab = copy_to_reg (lab);
875 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
876 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
878 emit_move_insn (hard_frame_pointer_rtx, fp);
879 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
881 emit_use (hard_frame_pointer_rtx);
882 emit_use (stack_pointer_rtx);
883 emit_indirect_jump (lab);
887 /* Search backwards and mark the jump insn as a non-local goto.
888 Note that this precludes the use of __builtin_longjmp to a
889 __builtin_setjmp target in the same function. However, we've
890 already cautioned the user that these functions are for
891 internal exception handling use only. */
892 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
894 gcc_assert (insn != last);
896 if (JUMP_P (insn))
898 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
899 break;
901 else if (CALL_P (insn))
902 break;
906 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
907 and the address of the save area. */
909 static rtx
910 expand_builtin_nonlocal_goto (tree exp)
912 tree t_label, t_save_area;
913 rtx r_label, r_save_area, r_fp, r_sp, insn;
915 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
/* Returning NULL_RTX makes the caller fall back to an ordinary call.  */
916 return NULL_RTX;
918 t_label = CALL_EXPR_ARG (exp, 0);
919 t_save_area = CALL_EXPR_ARG (exp, 1);
921 r_label = expand_normal (t_label);
922 r_label = convert_memory_address (Pmode, r_label);
923 r_save_area = expand_normal (t_save_area);
924 r_save_area = convert_memory_address (Pmode, r_save_area);
925 /* Copy the address of the save location to a register just in case it was based
926 on the frame pointer. */
927 r_save_area = copy_to_reg (r_save_area);
/* The save area layout is: word 0 = frame pointer, word 1 = stack pointer
   (in STACK_SAVEAREA_MODE), matching what __builtin_setjmp stored.  */
928 r_fp = gen_rtx_MEM (Pmode, r_save_area);
929 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
930 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
932 crtl->has_nonlocal_goto = 1;
934 #ifdef HAVE_nonlocal_goto
935 /* ??? We no longer need to pass the static chain value, afaik. */
936 if (HAVE_nonlocal_goto)
937 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
938 else
939 #endif
/* Generic fallback when the target has no nonlocal_goto pattern:
   clobber memory and the frame pointer, restore FP and SP from the
   save area, then jump indirectly to the label.  */
941 r_label = copy_to_reg (r_label);
943 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
944 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
946 /* Restore frame pointer for containing function.
947 This sets the actual hard register used for the frame pointer
948 to the location of the function's incoming static chain info.
949 The non-local goto handler will then adjust it to contain the
950 proper value and reload the argument pointer, if needed. */
951 emit_move_insn (hard_frame_pointer_rtx, r_fp);
952 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
954 /* USE of hard_frame_pointer_rtx added for consistency;
955 not clear if really needed. */
956 emit_use (hard_frame_pointer_rtx);
957 emit_use (stack_pointer_rtx);
959 /* If the architecture is using a GP register, we must
960 conservatively assume that the target function makes use of it.
961 The prologue of functions with nonlocal gotos must therefore
962 initialize the GP register to the appropriate value, and we
963 must then make sure that this value is live at the point
964 of the jump. (Note that this doesn't necessarily apply
965 to targets with a nonlocal_goto pattern; they are free
966 to implement it in their own way. Note also that this is
967 a no-op if the GP register is a global invariant.) */
968 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
969 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
970 emit_use (pic_offset_table_rtx)
971 emit_indirect_jump (r_label);
975 /* Search backwards to the jump insn and mark it as a
976 non-local goto. */
977 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
979 if (JUMP_P (insn))
981 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
982 break;
/* Stop at a CALL: the jump must have been emitted by this expansion,
   not by something before an intervening call.  */
984 else if (CALL_P (insn))
985 break;
988 return const0_rtx;
991 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
992 (not all will be used on all machines) that was passed to __builtin_setjmp.
993 It updates the stack pointer in that block to correspond to the current
994 stack pointer. */
996 static void
997 expand_builtin_update_setjmp_buf (rtx buf_addr)
999 enum machine_mode sa_mode = Pmode;
1000 rtx stack_save;
1003 #ifdef HAVE_save_stack_nonlocal
1004 if (HAVE_save_stack_nonlocal)
1005 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
1006 #endif
1007 #ifdef STACK_SAVEAREA_MODE
/* When defined, STACK_SAVEAREA_MODE takes precedence over the mode
   derived from the save_stack_nonlocal insn above.  */
1008 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1009 #endif
/* The stack-pointer slot is the third word of the setjmp buffer
   (offset 2 * GET_MODE_SIZE (Pmode)).  */
1011 stack_save
1012 = gen_rtx_MEM (sa_mode,
1013 memory_address
1014 (sa_mode,
1015 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
1017 #ifdef HAVE_setjmp
1018 if (HAVE_setjmp)
1019 emit_insn (gen_setjmp ());
1020 #endif
/* Store the current stack pointer into the buffer slot.  */
1022 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
1025 /* Expand a call to __builtin_prefetch. For a target that does not support
1026 data prefetch, evaluate the memory address argument in case it has side
1027 effects. */
1029 static void
1030 expand_builtin_prefetch (tree exp)
1032 tree arg0, arg1, arg2;
1033 int nargs;
1034 rtx op0, op1, op2;
1036 if (!validate_arglist (exp, POINTER_TYPE, 0))
1037 return;
1039 arg0 = CALL_EXPR_ARG (exp, 0);
1041 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1042 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1043 locality). */
1044 nargs = call_expr_nargs (exp);
1045 if (nargs > 1)
1046 arg1 = CALL_EXPR_ARG (exp, 1);
1047 else
1048 arg1 = integer_zero_node;
1049 if (nargs > 2)
1050 arg2 = CALL_EXPR_ARG (exp, 2);
1051 else
1052 arg2 = build_int_cst (NULL_TREE, 3);
1054 /* Argument 0 is an address. */
1055 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1057 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1058 if (TREE_CODE (arg1) != INTEGER_CST)
1060 error ("second argument to %<__builtin_prefetch%> must be a constant");
1061 arg1 = integer_zero_node;
1063 op1 = expand_normal (arg1);
1064 /* Argument 1 must be either zero or one. */
1065 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
/* Out-of-range values get a warning and are silently replaced with 0.  */
1067 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1068 " using zero");
1069 op1 = const0_rtx;
1072 /* Argument 2 (locality) must be a compile-time constant int. */
1073 if (TREE_CODE (arg2) != INTEGER_CST)
1075 error ("third argument to %<__builtin_prefetch%> must be a constant");
1076 arg2 = integer_zero_node;
1078 op2 = expand_normal (arg2);
1079 /* Argument 2 must be 0, 1, 2, or 3. */
1080 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1082 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1083 op2 = const0_rtx;
1086 #ifdef HAVE_prefetch
1087 if (HAVE_prefetch)
/* Force the address into a form the prefetch pattern accepts: if the
   operand predicate rejects it, or the mode is wrong, convert and
   load it into a Pmode register.  */
1089 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1090 (op0,
1091 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1092 || (GET_MODE (op0) != Pmode))
1094 op0 = convert_memory_address (Pmode, op0);
1095 op0 = force_reg (Pmode, op0);
1097 emit_insn (gen_prefetch (op0, op1, op2));
1099 #endif
1101 /* Don't do anything with direct references to volatile memory, but
1102 generate code to handle other side effects. */
1103 if (!MEM_P (op0) && side_effects_p (op0))
1104 emit_insn (op0);
1107 /* Get a MEM rtx for expression EXP which is the address of an operand
1108 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1109 the maximum length of the block of memory that might be accessed or
1110 NULL if unknown. */
1112 static rtx
1113 get_memory_rtx (tree exp, tree len)
1115 tree orig_exp = exp;
1116 rtx addr, mem;
1117 HOST_WIDE_INT off;
1119 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1120 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1121 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1122 exp = TREE_OPERAND (exp, 0);
/* Expand the ORIGINAL expression (including any SAVE_EXPR wrapper);
   the stripped EXP is used only for deriving memory attributes.  */
1124 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1125 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1127 /* Get an expression we can use to find the attributes to assign to MEM.
1128 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1129 we can. First remove any nops. */
1130 while (CONVERT_EXPR_P (exp)
1131 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1132 exp = TREE_OPERAND (exp, 0);
1134 off = 0;
/* &obj + CST: record the constant offset and use the underlying object.  */
1135 if (TREE_CODE (exp) == POINTER_PLUS_EXPR
1136 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1137 && host_integerp (TREE_OPERAND (exp, 1), 0)
1138 && (off = tree_low_cst (TREE_OPERAND (exp, 1), 0)) > 0)
1139 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1140 else if (TREE_CODE (exp) == ADDR_EXPR)
1141 exp = TREE_OPERAND (exp, 0);
1142 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1143 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1144 else
1145 exp = NULL;
1147 /* Honor attributes derived from exp, except for the alias set
1148 (as builtin stringops may alias with anything) and the size
1149 (as stringops may access multiple array elements). */
1150 if (exp)
1152 set_mem_attributes (mem, exp, 0);
1154 if (off)
1155 mem = adjust_automodify_address_nv (mem, BLKmode, NULL, off);
1157 /* Allow the string and memory builtins to overflow from one
1158 field into another, see http://gcc.gnu.org/PR23561.
1159 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1160 memory accessed by the string or memory builtin will fit
1161 within the field. */
1162 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1164 tree mem_expr = MEM_EXPR (mem);
1165 HOST_WIDE_INT offset = -1, length = -1;
1166 tree inner = exp;
/* Strip array indexing and conversions to reach the COMPONENT_REF.  */
1168 while (TREE_CODE (inner) == ARRAY_REF
1169 || CONVERT_EXPR_P (inner)
1170 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1171 || TREE_CODE (inner) == SAVE_EXPR)
1172 inner = TREE_OPERAND (inner, 0);
1174 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1176 if (MEM_OFFSET (mem)
1177 && CONST_INT_P (MEM_OFFSET (mem)))
1178 offset = INTVAL (MEM_OFFSET (mem));
/* offset/length stay -1 ("unknown") unless both are constants.  */
1180 if (offset >= 0 && len && host_integerp (len, 0))
1181 length = tree_low_cst (len, 0);
1183 while (TREE_CODE (inner) == COMPONENT_REF)
1185 tree field = TREE_OPERAND (inner, 1);
1186 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1187 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1189 /* Bitfields are generally not byte-addressable. */
1190 gcc_assert (!DECL_BIT_FIELD (field)
1191 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1192 % BITS_PER_UNIT) == 0
1193 && host_integerp (DECL_SIZE (field), 0)
1194 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1195 % BITS_PER_UNIT) == 0));
1197 /* If we can prove that the memory starting at XEXP (mem, 0) and
1198 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1199 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1200 fields without DECL_SIZE_UNIT like flexible array members. */
1201 if (length >= 0
1202 && DECL_SIZE_UNIT (field)
1203 && host_integerp (DECL_SIZE_UNIT (field), 0))
1205 HOST_WIDE_INT size
1206 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1207 if (offset <= size
1208 && length <= size
1209 && offset + length <= size)
1210 break;
/* Access may escape this field: step out one level, accumulating
   the field's byte offset into OFFSET.  */
1213 if (offset >= 0
1214 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1215 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1216 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1217 / BITS_PER_UNIT;
1218 else
1220 offset = -1;
1221 length = -1;
1224 mem_expr = TREE_OPERAND (mem_expr, 0);
1225 inner = TREE_OPERAND (inner, 0);
1228 if (mem_expr == NULL)
1229 offset = -1;
1230 if (mem_expr != MEM_EXPR (mem))
1232 set_mem_expr (mem, mem_expr);
1233 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
/* Builtin stringops may alias anything, and the accessed size is
   unknown here, so drop both attributes unconditionally.  */
1236 set_mem_alias_set (mem, 0);
1237 set_mem_size (mem, NULL_RTX);
1240 return mem;
1243 /* Built-in functions to perform an untyped call and return. */
1245 /* For each register that may be used for calling a function, this
1246 gives a mode used to copy the register's value. VOIDmode indicates
1247 the register is not used for calling a function. If the machine
1248 has register windows, this gives only the outbound registers.
1249 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_args_size on first use.  */
1250 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1252 /* For each register that may be used for returning values, this gives
1253 a mode used to copy the register's value. VOIDmode indicates the
1254 register is not used for returning values. If the machine has
1255 register windows, this gives only the outbound registers.
1256 INCOMING_REGNO gives the corresponding inbound register. */
/* Filled in lazily by apply_result_size on first use.  */
1257 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1259 /* Return the size required for the block returned by __builtin_apply_args,
1260 and initialize apply_args_mode. */
1262 static int
1263 apply_args_size (void)
/* SIZE < 0 means "not computed yet"; the result is cached across calls.  */
1265 static int size = -1;
1266 int align;
1267 unsigned int regno;
1268 enum machine_mode mode;
1270 /* The values computed by this function never change. */
1271 if (size < 0)
1273 /* The first value is the incoming arg-pointer. */
1274 size = GET_MODE_SIZE (Pmode);
1276 /* The second value is the structure value address unless this is
1277 passed as an "invisible" first argument. */
1278 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1279 size += GET_MODE_SIZE (Pmode);
1281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1282 if (FUNCTION_ARG_REGNO_P (regno))
1284 mode = reg_raw_mode[regno];
1286 gcc_assert (mode != VOIDmode);
/* Round SIZE up to the register mode's alignment before adding it.  */
1288 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1289 if (size % align != 0)
1290 size = CEIL (size, align) * align;
1291 size += GET_MODE_SIZE (mode);
1292 apply_args_mode[regno] = mode;
1294 else
1296 apply_args_mode[regno] = VOIDmode;
1299 return size;
1302 /* Return the size required for the block returned by __builtin_apply,
1303 and initialize apply_result_mode. */
1305 static int
1306 apply_result_size (void)
/* SIZE < 0 means "not computed yet"; the result is cached across calls.  */
1308 static int size = -1;
1309 int align, regno;
1310 enum machine_mode mode;
1312 /* The values computed by this function never change. */
1313 if (size < 0)
1315 size = 0;
1317 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1318 if (FUNCTION_VALUE_REGNO_P (regno))
1320 mode = reg_raw_mode[regno];
1322 gcc_assert (mode != VOIDmode);
/* Round SIZE up to the register mode's alignment before adding it.  */
1324 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1325 if (size % align != 0)
1326 size = CEIL (size, align) * align;
1327 size += GET_MODE_SIZE (mode);
1328 apply_result_mode[regno] = mode;
1330 else
1331 apply_result_mode[regno] = VOIDmode;
1333 /* Allow targets that use untyped_call and untyped_return to override
1334 the size so that machine-specific information can be stored here. */
1335 #ifdef APPLY_RESULT_SIZE
1336 size = APPLY_RESULT_SIZE;
1337 #endif
1339 return size;
1342 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1343 /* Create a vector describing the result block RESULT. If SAVEP is true,
1344 the result block is used to save the values; otherwise it is used to
1345 restore the values. */
1347 static rtx
1348 result_vector (int savep, rtx result)
1350 int regno, size, align, nelts;
1351 enum machine_mode mode;
1352 rtx reg, mem;
1353 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1355 size = nelts = 0;
/* Walk every hard register participating in value return (as recorded
   in apply_result_mode) and build one SET per register.  */
1356 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1357 if ((mode = apply_result_mode[regno]) != VOIDmode)
1359 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1360 if (size % align != 0)
1361 size = CEIL (size, align) * align;
/* When saving we use the outbound register number; when restoring,
   the corresponding inbound one.  */
1362 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1363 mem = adjust_address (result, mode, size);
1364 savevec[nelts++] = (savep
1365 ? gen_rtx_SET (VOIDmode, mem, reg)
1366 : gen_rtx_SET (VOIDmode, reg, mem));
1367 size += GET_MODE_SIZE (mode);
1369 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1371 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1373 /* Save the state required to perform an untyped call with the same
1374 arguments as were passed to the current function. */
1376 static rtx
1377 expand_builtin_apply_args_1 (void)
1379 rtx registers, tem;
1380 int size, align, regno;
1381 enum machine_mode mode;
1382 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1384 /* Create a block where the arg-pointer, structure value address,
1385 and argument registers can be saved. */
1386 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1388 /* Walk past the arg-pointer and structure value address. */
1389 size = GET_MODE_SIZE (Pmode);
1390 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1391 size += GET_MODE_SIZE (Pmode);
1393 /* Save each register used in calling a function to the block. */
1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1395 if ((mode = apply_args_mode[regno]) != VOIDmode)
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
1401 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1403 emit_move_insn (adjust_address (registers, mode, size), tem);
1404 size += GET_MODE_SIZE (mode);
1407 /* Save the arg pointer to the block. */
1408 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1409 #ifdef STACK_GROWS_DOWNWARD
1410 /* We need the pointer as the caller actually passed them to us, not
1411 as we might have pretended they were passed. Make sure it's a valid
1412 operand, as emit_move_insn isn't expected to handle a PLUS. */
/* NOTE(review): the lvalue "tem" for the assignment below (original
   line 1413) appears to have been lost from this extract — verify
   against the upstream source.  */
1414 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1415 NULL_RTX);
1416 #endif
1417 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1419 size = GET_MODE_SIZE (Pmode);
1421 /* Save the structure value address unless this is passed as an
1422 "invisible" first argument. */
1423 if (struct_incoming_value)
1425 emit_move_insn (adjust_address (registers, Pmode, size),
1426 copy_to_reg (struct_incoming_value));
1427 size += GET_MODE_SIZE (Pmode);
1430 /* Return the address of the block. */
1431 return copy_addr_to_reg (XEXP (registers, 0));
1434 /* __builtin_apply_args returns block of memory allocated on
1435 the stack into which is stored the arg pointer, structure
1436 value address, static chain, and all the registers that might
1437 possibly be used in performing a function call. The code is
1438 moved to the start of the function so the incoming values are
1439 saved. */
1441 static rtx
1442 expand_builtin_apply_args (void)
1444 /* Don't do __builtin_apply_args more than once in a function.
1445 Save the result of the first call and reuse it. */
1446 if (apply_args_value != 0)
1447 return apply_args_value;
1449 /* When this function is called, it means that registers must be
1450 saved on entry to this function. So we migrate the
1451 call to the first insn of this function. */
1452 rtx temp;
1453 rtx seq;
/* Expand the save sequence into a detached insn list so it can be
   re-emitted at the function entry below.  */
1455 start_sequence ();
1456 temp = expand_builtin_apply_args_1 ();
1457 seq = get_insns ();
1458 end_sequence ();
1460 apply_args_value = temp;
1462 /* Put the insns after the NOTE that starts the function.
1463 If this is inside a start_sequence, make the outer-level insn
1464 chain current, so the code is placed at the start of the
1465 function. If internal_arg_pointer is a non-virtual pseudo,
1466 it needs to be placed after the function that initializes
1467 that pseudo. */
1468 push_topmost_sequence ();
1469 if (REG_P (crtl->args.internal_arg_pointer)
1470 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1471 emit_insn_before (seq, parm_birth_insn);
1472 else
1473 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1474 pop_topmost_sequence ();
1475 return temp;
1479 /* Perform an untyped call and save the state required to perform an
1480 untyped return of whatever value was returned by the given function. */
1482 static rtx
1483 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1485 int size, align, regno;
1486 enum machine_mode mode;
1487 rtx incoming_args, result, reg, dest, src, call_insn;
1488 rtx old_stack_level = 0;
1489 rtx call_fusage = 0;
1490 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1492 arguments = convert_memory_address (Pmode, arguments);
1494 /* Create a block where the return registers can be saved. */
1495 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1497 /* Fetch the arg pointer from the ARGUMENTS block. */
1498 incoming_args = gen_reg_rtx (Pmode);
1499 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1500 #ifndef STACK_GROWS_DOWNWARD
/* On upward-growing stacks the argument block lies below the saved
   arg pointer; step back over it.  */
1501 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1502 incoming_args, 0, OPTAB_LIB_WIDEN);
1503 #endif
1505 /* Push a new argument block and copy the arguments. Do not allow
1506 the (potential) memcpy call below to interfere with our stack
1507 manipulations. */
1508 do_pending_stack_adjust ();
1509 NO_DEFER_POP;
1511 /* Save the stack with nonlocal if available. */
1512 #ifdef HAVE_save_stack_nonlocal
1513 if (HAVE_save_stack_nonlocal)
1514 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1515 else
1516 #endif
1517 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1519 /* Allocate a block of memory onto the stack and copy the memory
1520 arguments to the outgoing arguments address. */
1521 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1523 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1524 may have already set current_function_calls_alloca to true.
1525 current_function_calls_alloca won't be set if argsize is zero,
1526 so we have to guarantee need_drap is true here. */
1527 if (SUPPORTS_STACK_ALIGNMENT)
1528 crtl->need_drap = true;
1530 dest = virtual_outgoing_args_rtx;
1531 #ifndef STACK_GROWS_DOWNWARD
1532 if (CONST_INT_P (argsize))
1533 dest = plus_constant (dest, -INTVAL (argsize));
1534 else
1535 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1536 #endif
1537 dest = gen_rtx_MEM (BLKmode, dest);
1538 set_mem_align (dest, PARM_BOUNDARY);
1539 src = gen_rtx_MEM (BLKmode, incoming_args);
1540 set_mem_align (src, PARM_BOUNDARY);
/* Copy the caller's saved argument block onto the fresh stack area;
   this may expand to a memcpy library call.  */
1541 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1543 /* Refer to the argument block. */
1544 apply_args_size ();
1545 arguments = gen_rtx_MEM (BLKmode, arguments);
1546 set_mem_align (arguments, PARM_BOUNDARY);
1548 /* Walk past the arg-pointer and structure value address. */
1549 size = GET_MODE_SIZE (Pmode);
1550 if (struct_value)
1551 size += GET_MODE_SIZE (Pmode);
1553 /* Restore each of the registers previously saved. Make USE insns
1554 for each of these registers for use in making the call. */
1555 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1556 if ((mode = apply_args_mode[regno]) != VOIDmode)
1558 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1559 if (size % align != 0)
1560 size = CEIL (size, align) * align;
1561 reg = gen_rtx_REG (mode, regno);
1562 emit_move_insn (reg, adjust_address (arguments, mode, size));
1563 use_reg (&call_fusage, reg);
1564 size += GET_MODE_SIZE (mode);
1567 /* Restore the structure value address unless this is passed as an
1568 "invisible" first argument. */
1569 size = GET_MODE_SIZE (Pmode);
1570 if (struct_value)
1572 rtx value = gen_reg_rtx (Pmode);
1573 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1574 emit_move_insn (struct_value, value);
1575 if (REG_P (struct_value))
1576 use_reg (&call_fusage, struct_value);
1577 size += GET_MODE_SIZE (Pmode);
1580 /* All arguments and registers used for the call are set up by now! */
1581 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1583 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1584 and we don't want to load it into a register as an optimization,
1585 because prepare_call_address already did it if it should be done. */
1586 if (GET_CODE (function) != SYMBOL_REF)
1587 function = memory_address (FUNCTION_MODE, function);
1589 /* Generate the actual call instruction and save the return value. */
1590 #ifdef HAVE_untyped_call
1591 if (HAVE_untyped_call)
1592 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1593 result, result_vector (1, result)))
1594 else
1595 #endif
1596 #ifdef HAVE_call_value
1597 if (HAVE_call_value)
1599 rtx valreg = 0;
1601 /* Locate the unique return register. It is not possible to
1602 express a call that sets more than one return register using
1603 call_value; use untyped_call for that. In fact, untyped_call
1604 only needs to save the return registers in the given block. */
1605 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1606 if ((mode = apply_result_mode[regno]) != VOIDmode)
1608 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1610 valreg = gen_rtx_REG (mode, regno);
1613 emit_call_insn (GEN_CALL_VALUE (valreg,
1614 gen_rtx_MEM (FUNCTION_MODE, function),
1615 const0_rtx, NULL_RTX, const0_rtx));
1617 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1619 else
1620 #endif
1621 gcc_unreachable ();
1623 /* Find the CALL insn we just emitted, and attach the register usage
1624 information. */
1625 call_insn = last_call_insn ();
1626 add_function_usage_to (call_insn, call_fusage);
1628 /* Restore the stack. */
/* Must mirror the emit_stack_save choice made above.  */
1629 #ifdef HAVE_save_stack_nonlocal
1630 if (HAVE_save_stack_nonlocal)
1631 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1632 else
1633 #endif
1634 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1636 OK_DEFER_POP;
1638 /* Return the address of the result block. */
1639 result = copy_addr_to_reg (XEXP (result, 0));
1640 return convert_memory_address (ptr_mode, result);
1643 /* Perform an untyped return. */
1645 static void
1646 expand_builtin_return (rtx result)
1648 int size, align, regno;
1649 enum machine_mode mode;
1650 rtx reg;
1651 rtx call_fusage = 0;
1653 result = convert_memory_address (Pmode, result);
/* Ensure apply_result_mode has been initialized before it is read.  */
1655 apply_result_size ();
1656 result = gen_rtx_MEM (BLKmode, result);
1658 #ifdef HAVE_untyped_return
1659 if (HAVE_untyped_return)
1661 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1662 emit_barrier ();
1663 return;
1665 #endif
1667 /* Restore the return value and note that each value is used. */
1668 size = 0;
1669 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1670 if ((mode = apply_result_mode[regno]) != VOIDmode)
1672 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1673 if (size % align != 0)
1674 size = CEIL (size, align) * align;
1675 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1676 emit_move_insn (reg, adjust_address (result, mode, size));
/* Accumulate USE insns in a side sequence so they can be emitted
   together just before the return.  */
1678 push_to_sequence (call_fusage);
1679 emit_use (reg);
1680 call_fusage = get_insns ();
1681 end_sequence ();
1682 size += GET_MODE_SIZE (mode);
1685 /* Put the USE insns before the return. */
1686 emit_insn (call_fusage);
1688 /* Return whatever values was restored by jumping directly to the end
1689 of the function. */
1690 expand_naked_return ();
1693 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1695 static enum type_class
1696 type_to_class (tree type)
1698 switch (TREE_CODE (type))
1700 case VOID_TYPE: return void_type_class;
1701 case INTEGER_TYPE: return integer_type_class;
1702 case ENUMERAL_TYPE: return enumeral_type_class;
1703 case BOOLEAN_TYPE: return boolean_type_class;
1704 case POINTER_TYPE: return pointer_type_class;
1705 case REFERENCE_TYPE: return reference_type_class;
1706 case OFFSET_TYPE: return offset_type_class;
1707 case REAL_TYPE: return real_type_class;
1708 case COMPLEX_TYPE: return complex_type_class;
1709 case FUNCTION_TYPE: return function_type_class;
1710 case METHOD_TYPE: return method_type_class;
1711 case RECORD_TYPE: return record_type_class;
1712 case UNION_TYPE:
1713 case QUAL_UNION_TYPE: return union_type_class;
1714 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1715 ? string_type_class : array_type_class);
1716 case LANG_TYPE: return lang_type_class;
1717 default: return no_type_class;
1721 /* Expand a call EXP to __builtin_classify_type. */
1723 static rtx
1724 expand_builtin_classify_type (tree exp)
1726 if (call_expr_nargs (exp))
1727 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1728 return GEN_INT (no_type_class);
1731 /* This helper macro, meant to be used in mathfn_built_in below,
1732 determines which among a set of three builtin math functions is
1733 appropriate for a given type mode. The `F' and `L' cases are
1734 automatically generated from the `double' case. */
/* Expands to three case labels (double, float, long double variants)
   and sets fcode/fcodef/fcodel, which are locals of the enclosing
   switch in mathfn_built_in_1.  */
1735 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1736 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1737 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1738 fcodel = BUILT_IN_MATHFN##L ; break;
1739 /* Similar to above, but appends _R after any F/L suffix. */
1740 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1741 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1742 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1743 fcodel = BUILT_IN_MATHFN##L_R ; break;
1745 /* Return mathematic function equivalent to FN but operating directly
1746 on TYPE, if available. If IMPLICIT is true find the function in
1747 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1748 can't do the conversion, return zero. */
1750 static tree
1751 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1753 tree const *const fn_arr
1754 = implicit ? implicit_built_in_decls : built_in_decls;
1755 enum built_in_function fcode, fcodef, fcodel;
/* Each CASE_MATHFN entry matches all three precision variants of FN
   and records the double/float/long-double codes in fcode/fcodef/fcodel.  */
1757 switch (fn)
1759 CASE_MATHFN (BUILT_IN_ACOS)
1760 CASE_MATHFN (BUILT_IN_ACOSH)
1761 CASE_MATHFN (BUILT_IN_ASIN)
1762 CASE_MATHFN (BUILT_IN_ASINH)
1763 CASE_MATHFN (BUILT_IN_ATAN)
1764 CASE_MATHFN (BUILT_IN_ATAN2)
1765 CASE_MATHFN (BUILT_IN_ATANH)
1766 CASE_MATHFN (BUILT_IN_CBRT)
1767 CASE_MATHFN (BUILT_IN_CEIL)
1768 CASE_MATHFN (BUILT_IN_CEXPI)
1769 CASE_MATHFN (BUILT_IN_COPYSIGN)
1770 CASE_MATHFN (BUILT_IN_COS)
1771 CASE_MATHFN (BUILT_IN_COSH)
1772 CASE_MATHFN (BUILT_IN_DREM)
1773 CASE_MATHFN (BUILT_IN_ERF)
1774 CASE_MATHFN (BUILT_IN_ERFC)
1775 CASE_MATHFN (BUILT_IN_EXP)
1776 CASE_MATHFN (BUILT_IN_EXP10)
1777 CASE_MATHFN (BUILT_IN_EXP2)
1778 CASE_MATHFN (BUILT_IN_EXPM1)
1779 CASE_MATHFN (BUILT_IN_FABS)
1780 CASE_MATHFN (BUILT_IN_FDIM)
1781 CASE_MATHFN (BUILT_IN_FLOOR)
1782 CASE_MATHFN (BUILT_IN_FMA)
1783 CASE_MATHFN (BUILT_IN_FMAX)
1784 CASE_MATHFN (BUILT_IN_FMIN)
1785 CASE_MATHFN (BUILT_IN_FMOD)
1786 CASE_MATHFN (BUILT_IN_FREXP)
1787 CASE_MATHFN (BUILT_IN_GAMMA)
1788 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1789 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1790 CASE_MATHFN (BUILT_IN_HYPOT)
1791 CASE_MATHFN (BUILT_IN_ILOGB)
1792 CASE_MATHFN (BUILT_IN_INF)
1793 CASE_MATHFN (BUILT_IN_ISINF)
1794 CASE_MATHFN (BUILT_IN_J0)
1795 CASE_MATHFN (BUILT_IN_J1)
1796 CASE_MATHFN (BUILT_IN_JN)
1797 CASE_MATHFN (BUILT_IN_LCEIL)
1798 CASE_MATHFN (BUILT_IN_LDEXP)
1799 CASE_MATHFN (BUILT_IN_LFLOOR)
1800 CASE_MATHFN (BUILT_IN_LGAMMA)
1801 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1802 CASE_MATHFN (BUILT_IN_LLCEIL)
1803 CASE_MATHFN (BUILT_IN_LLFLOOR)
1804 CASE_MATHFN (BUILT_IN_LLRINT)
1805 CASE_MATHFN (BUILT_IN_LLROUND)
1806 CASE_MATHFN (BUILT_IN_LOG)
1807 CASE_MATHFN (BUILT_IN_LOG10)
1808 CASE_MATHFN (BUILT_IN_LOG1P)
1809 CASE_MATHFN (BUILT_IN_LOG2)
1810 CASE_MATHFN (BUILT_IN_LOGB)
1811 CASE_MATHFN (BUILT_IN_LRINT)
1812 CASE_MATHFN (BUILT_IN_LROUND)
1813 CASE_MATHFN (BUILT_IN_MODF)
1814 CASE_MATHFN (BUILT_IN_NAN)
1815 CASE_MATHFN (BUILT_IN_NANS)
1816 CASE_MATHFN (BUILT_IN_NEARBYINT)
1817 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1818 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1819 CASE_MATHFN (BUILT_IN_POW)
1820 CASE_MATHFN (BUILT_IN_POWI)
1821 CASE_MATHFN (BUILT_IN_POW10)
1822 CASE_MATHFN (BUILT_IN_REMAINDER)
1823 CASE_MATHFN (BUILT_IN_REMQUO)
1824 CASE_MATHFN (BUILT_IN_RINT)
1825 CASE_MATHFN (BUILT_IN_ROUND)
1826 CASE_MATHFN (BUILT_IN_SCALB)
1827 CASE_MATHFN (BUILT_IN_SCALBLN)
1828 CASE_MATHFN (BUILT_IN_SCALBN)
1829 CASE_MATHFN (BUILT_IN_SIGNBIT)
1830 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1831 CASE_MATHFN (BUILT_IN_SIN)
1832 CASE_MATHFN (BUILT_IN_SINCOS)
1833 CASE_MATHFN (BUILT_IN_SINH)
1834 CASE_MATHFN (BUILT_IN_SQRT)
1835 CASE_MATHFN (BUILT_IN_TAN)
1836 CASE_MATHFN (BUILT_IN_TANH)
1837 CASE_MATHFN (BUILT_IN_TGAMMA)
1838 CASE_MATHFN (BUILT_IN_TRUNC)
1839 CASE_MATHFN (BUILT_IN_Y0)
1840 CASE_MATHFN (BUILT_IN_Y1)
1841 CASE_MATHFN (BUILT_IN_YN)
1843 default:
1844 return NULL_TREE;
/* Select the variant matching TYPE's main variant; any other type
   (e.g. decimal float) yields NULL_TREE.  */
1847 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1848 return fn_arr[fcode];
1849 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1850 return fn_arr[fcodef];
1851 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1852 return fn_arr[fcodel];
1853 else
1854 return NULL_TREE;
1857 /* Like mathfn_built_in_1(), but always use the implicit array. */
1859 tree
1860 mathfn_built_in (tree type, enum built_in_function fn)
1862 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1865 /* If errno must be maintained, expand the RTL to check if the result,
1866 TARGET, of a built-in function call, EXP, is NaN, and if so set
1867 errno to EDOM. */
1869 static void
1870 expand_errno_check (tree exp, rtx target)
1872 rtx lab = gen_label_rtx ();
1874 /* Test the result; if it is NaN, set errno=EDOM because
1875 the argument was not in the domain. */
/* A NaN compares unequal to itself, so TARGET == TARGET fails exactly
   when TARGET is NaN; jump to LAB on the non-NaN (ordinary) path.  */
1876 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1877 NULL_RTX, NULL_RTX, lab);
1879 #ifdef TARGET_EDOM
1880 /* If this built-in doesn't throw an exception, set errno directly. */
1881 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1883 #ifdef GEN_ERRNO_RTX
1884 rtx errno_rtx = GEN_ERRNO_RTX;
1885 #else
1886 rtx errno_rtx
1887 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1888 #endif
1889 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1890 emit_label (lab);
1891 return;
1893 #endif
1895 /* Make sure the library call isn't expanded as a tail call. */
1896 CALL_EXPR_TAILCALL (exp) = 0;
1898 /* We can't set errno=EDOM directly; let the library call do it.
1899 Pop the arguments right away in case the call gets deleted. */
1900 NO_DEFER_POP;
1901 expand_call (exp, target, 0);
1902 OK_DEFER_POP;
1903 emit_label (lab);
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab; ERRNO_SET records whether the C library version
     may set errno, which forces extra checking below.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets errno for a negative argument, so skip the
	 check when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Without -fmath-errno, or when NaN cannot occur in MODE, there is
     no domain error to detect.  */
  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  enum machine_mode mode;
  bool errno_set = true;

  /* scalbn/scalbln/ldexp take an integer second argument; everything
     else takes two reals.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* Fall through.  */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only equivalent to the optab for radix-2 formats.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  target = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_binop (mode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (target == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int result;

	  /* The sincos pattern has two outputs; the first receives the
	     cosine and the second the sine, so TARGET is passed in the
	     slot matching the function being expanded and the unwanted
	     output is left for the expander to allocate.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (result);
	}
      else
	{
	  target = expand_unop (mode, builtin_optab, op0, target, 0);
	}

      if (target != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2236 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2237 return an RTL instruction code that implements the functionality.
2238 If that isn't possible or available return CODE_FOR_nothing. */
2240 static enum insn_code
2241 interclass_mathfn_icode (tree arg, tree fndecl)
2243 bool errno_set = false;
2244 optab builtin_optab = 0;
2245 enum machine_mode mode;
2247 switch (DECL_FUNCTION_CODE (fndecl))
2249 CASE_FLT_FN (BUILT_IN_ILOGB):
2250 errno_set = true; builtin_optab = ilogb_optab; break;
2251 CASE_FLT_FN (BUILT_IN_ISINF):
2252 builtin_optab = isinf_optab; break;
2253 case BUILT_IN_ISNORMAL:
2254 case BUILT_IN_ISFINITE:
2255 CASE_FLT_FN (BUILT_IN_FINITE):
2256 case BUILT_IN_FINITED32:
2257 case BUILT_IN_FINITED64:
2258 case BUILT_IN_FINITED128:
2259 case BUILT_IN_ISINFD32:
2260 case BUILT_IN_ISINFD64:
2261 case BUILT_IN_ISINFD128:
2262 /* These builtins have no optabs (yet). */
2263 break;
2264 default:
2265 gcc_unreachable ();
2268 /* There's no easy way to detect the case we need to set EDOM. */
2269 if (flag_errno_math && errno_set)
2270 return CODE_FOR_nothing;
2272 /* Optab mode depends on the mode of the input argument. */
2273 mode = TYPE_MODE (TREE_TYPE (arg));
2275 if (builtin_optab)
2276 return optab_handler (builtin_optab, mode)->insn_code;
2277 return CODE_FOR_nothing;
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      /* Make a suitable register to place result in.  Note the result
	 mode comes from EXP (the integer result) while the operand mode
	 comes from ARG (the floating-point input).  */
      if (!target
	  || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      gcc_assert (insn_data[icode].operand[0].predicate
		  (target, GET_MODE (target)));

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      emit_unop_insn (icode, target, op0, UNKNOWN);
      return target;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  enum machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
    return NULL_RTX;

  /* TARGET1 receives the sine, TARGET2 the cosine (the sincos pattern's
     first output is the cosine, second the sine).  */
  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  op1 = expand_normal (build_fold_indirect_ref_loc (loc, sinp));
  op2 = expand_normal (build_fold_indirect_ref_loc (loc, cosp));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  /* sincos returns void, so the expansion's value is a dummy.  */
  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.

   Three strategies are tried in order: a sincos optab insn, a call to
   the sincos library function, and finally a call to cexp with a pure
   imaginary argument.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  enum machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2 (op2 = cos, op1 = sin).  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (TARGET_HAS_SINCOS)
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_SINCOSF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_SINCOS];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_SINCOSL];
      else
	gcc_unreachable ();

      /* Stack temporaries to receive sincos's two results by pointer.  */
      op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
      op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
      op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = built_in_decls[BUILT_IN_CEXPF];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = built_in_decls[BUILT_IN_CEXP];
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = built_in_decls[BUILT_IN_CEXPL];
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi(x) == cexp(0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos + sin*i.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   LOC is the source location attached to the resulting expression.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Convenience wrapper for callers that have no useful location.  */
#define build_call_nofold(...) \
  build_call_nofold_loc (UNKNOWN_LOCATION, __VA_ARGS__)
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns, tmp;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  enum machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold (fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  enum machine_mode mode;

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math)
    return NULL_RTX;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab; break;
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  end_sequence ();

  target = expand_call (exp, target, target == const0_rtx);

  return target;
}
2710 /* To evaluate powi(x,n), the floating point value x raised to the
2711 constant integer exponent n, we use a hybrid algorithm that
2712 combines the "window method" with look-up tables. For an
2713 introduction to exponentiation algorithms and "addition chains",
2714 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2715 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2716 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2717 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2719 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2720 multiplications to inline before calling the system library's pow
2721 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2722 so this default never requires calling pow, powf or powl. */
2724 #ifndef POWI_MAX_MULTS
2725 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2726 #endif
2728 /* The size of the "optimal power tree" lookup table. All
2729 exponents less than this value are simply looked up in the
2730 powi_table below. This threshold is also used to size the
2731 cache of pseudo registers that hold intermediate results. */
2732 #define POWI_TABLE_SIZE 256
2734 /* The size, in bits of the window, used in the "window method"
2735 exponentiation algorithm. This is equivalent to a radix of
2736 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2737 #define POWI_WINDOW_SIZE 3
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2783 /* Return the number of multiplications required to calculate
2784 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2785 subroutine of powi_cost. CACHE is an array indicating
2786 which exponents have already been calculated. */
2788 static int
2789 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2791 /* If we've already calculated this exponent, then this evaluation
2792 doesn't require any additional multiplications. */
2793 if (cache[n])
2794 return 0;
2796 cache[n] = true;
2797 return powi_lookup_cost (n - powi_table[n], cache)
2798 + powi_lookup_cost (powi_table[n], cache) + 1;
2801 /* Return the number of multiplications required to calculate
2802 powi(x,n) for an arbitrary x, given the exponent N. This
2803 function needs to be kept in sync with expand_powi below. */
2805 static int
2806 powi_cost (HOST_WIDE_INT n)
2808 bool cache[POWI_TABLE_SIZE];
2809 unsigned HOST_WIDE_INT digit;
2810 unsigned HOST_WIDE_INT val;
2811 int result;
2813 if (n == 0)
2814 return 0;
2816 /* Ignore the reciprocal when calculating the cost. */
2817 val = (n < 0) ? -n : n;
2819 /* Initialize the exponent cache. */
2820 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2821 cache[1] = true;
2823 result = 0;
2825 while (val >= POWI_TABLE_SIZE)
2827 if (val & 1)
2829 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2830 result += powi_lookup_cost (digit, cache)
2831 + POWI_WINDOW_SIZE + 1;
2832 val >>= POWI_WINDOW_SIZE;
2834 else
2836 val >>= 1;
2837 result++;
2841 return result + powi_lookup_cost (val, cache);
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponents are memoized; split per the optimal power
	 tree: x**n = x**(n-j) * x**j with j = powi_table[n].  */
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      /* Odd large exponent: peel off a POWI_WINDOW_SIZE-bit digit.  */
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      /* Even large exponent: x**n = (x**(n/2))**2.  */
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
2886 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2887 floating point operand in mode MODE, and N is the exponent. This
2888 function needs to be kept in sync with powi_cost above. */
2890 static rtx
2891 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2893 rtx cache[POWI_TABLE_SIZE];
2894 rtx result;
2896 if (n == 0)
2897 return CONST1_RTX (mode);
2899 memset (cache, 0, sizeof (cache));
2900 cache[1] = x;
2902 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2904 /* If the original exponent was negative, reciprocate the result. */
2905 if (n < 0)
2906 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2907 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2909 return result;
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
  tree arg0, arg1;
  tree fn, narg0;
  tree type = TREE_TYPE (exp);
  REAL_VALUE_TYPE cint, c, c2;
  HOST_WIDE_INT n;
  rtx op, op2;
  enum machine_mode mode = TYPE_MODE (type);

  if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  /* A non-constant (or overflowed) exponent can't use any of the
     tricks below; expand through the generic two-operand path.  */
  if (TREE_CODE (arg1) != REAL_CST
      || TREE_OVERFLOW (arg1))
    return expand_builtin_mathfn_2 (exp, target, subtarget);

  /* Handle constant exponents.  */

  /* For integer valued exponents we can expand to an optimal multiplication
     sequence using expand_powi.  */
  c = TREE_REAL_CST (arg1);
  n = real_to_integer (&c);
  real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
  /* -1, 0, 1 and 2 are always safe; otherwise require unsafe math,
     a speed-optimized context, and a cheap-enough powi sequence.  */
  if (real_identical (&c, &cint)
      && ((n >= -1 && n <= 2)
	  || (flag_unsafe_math_optimizations
	      && optimize_insn_for_speed_p ()
	      && powi_cost (n) <= POWI_MAX_MULTS)))
    {
      op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
      if (n != 1)
	{
	  op = force_reg (mode, op);
	  op = expand_powi (op, mode, n);
	}
      return op;
    }

  /* ARG0 is evaluated more than once in the paths below; stabilize it.  */
  narg0 = builtin_save_expr (arg0);

  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
  fn = mathfn_built_in (type, BUILT_IN_SQRT);
  if (fn != NULL_TREE)
    {
      /* c2 = 2*c; if c2 is an integer N, then pow (x, c) is
	 sqrt (x) * x**(N/2) (with reciprocal for negative N).  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c2, &cint)
	  && ((flag_unsafe_math_optimizations
	       && optimize_insn_for_speed_p ()
	       && powi_cost (n/2) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  /* Use expand_expr in case the newly built call expression
	     was folded to a non-call.  */
	  op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 2));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Try if the exponent is a third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
  fn = mathfn_built_in (type, BUILT_IN_CBRT);
  if (fn != NULL_TREE
      && flag_unsafe_math_optimizations
      && (tree_expr_nonnegative_p (arg0)
	  || !HONOR_NANS (mode)))
    {
      REAL_VALUE_TYPE dconst3;
      real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
      /* Round 3*c to an integer N, then verify N/3 converts back to
	 exactly c in this mode before committing to the expansion.  */
      real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
      real_round (&c2, mode, &c2);
      n = real_to_integer (&c2);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
      real_convert (&c2, mode, &c2);
      if (real_identical (&c2, &c)
	  && ((optimize_insn_for_speed_p ()
	       && powi_cost (n/3) <= POWI_MAX_MULTS)
	      || n == 1))
	{
	  tree call_expr = build_call_nofold (fn, 1, narg0);
	  op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
	  /* cbrt(x)**2 is needed when |n| mod 3 == 2.  */
	  if (abs (n) % 3 == 2)
	    op = expand_simple_binop (mode, MULT, op, op, op,
				      0, OPTAB_LIB_WIDEN);
	  if (n != 1)
	    {
	      op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
	      op2 = force_reg (mode, op2);
	      op2 = expand_powi (op2, mode, abs (n / 3));
	      op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
					0, OPTAB_LIB_WIDEN);
	      /* If the original exponent was negative, reciprocate the
		 result.  */
	      if (n < 0)
		op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
				   op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }
	  return op;
	}
    }

  /* Fall back to optab expansion.  */
  return expand_builtin_mathfn_2 (exp, target, subtarget);
}
3047 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3048 a normal call should be emitted rather than expanding the function
3049 in-line. EXP is the expression that is a call to the builtin
3050 function; if convenient, the result should be placed in TARGET. */
3052 static rtx
3053 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3055 tree arg0, arg1;
3056 rtx op0, op1;
3057 enum machine_mode mode;
3058 enum machine_mode mode2;
3060 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3061 return NULL_RTX;
3063 arg0 = CALL_EXPR_ARG (exp, 0);
3064 arg1 = CALL_EXPR_ARG (exp, 1);
3065 mode = TYPE_MODE (TREE_TYPE (exp));
3067 /* Handle constant power. */
3069 if (TREE_CODE (arg1) == INTEGER_CST
3070 && !TREE_OVERFLOW (arg1))
3072 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3074 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3075 Otherwise, check the number of multiplications required. */
3076 if ((TREE_INT_CST_HIGH (arg1) == 0
3077 || TREE_INT_CST_HIGH (arg1) == -1)
3078 && ((n >= -1 && n <= 2)
3079 || (optimize_insn_for_speed_p ()
3080 && powi_cost (n) <= POWI_MAX_MULTS)))
3082 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3083 op0 = force_reg (mode, op0);
3084 return expand_powi (op0, mode, n);
3088 /* Emit a libcall to libgcc. */
3090 /* Mode of the 2nd argument must match that of an int. */
3091 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3093 if (target == NULL_RTX)
3094 target = gen_reg_rtx (mode);
3096 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3097 if (GET_MODE (op0) != mode)
3098 op0 = convert_to_mode (mode, op0, 0);
3099 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3100 if (GET_MODE (op1) != mode2)
3101 op1 = convert_to_mode (mode2, op1, 0);
3103 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3104 target, LCT_CONST, mode, 2,
3105 op0, mode, op1, mode2);
3107 return target;
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  TARGET_MODE is the
   mode in which the caller wants the result.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Walk to wider modes until a strlen pattern is found.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode)->insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result)
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      /* The search character is NUL; force it into the mode the insn
	 pattern requires if the predicate rejects const0_rtx.  */
      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							    char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return NULL_RTX;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insn emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
3219 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3220 bytes from constant string DATA + OFFSET and return it as target
3221 constant. */
3223 static rtx
3224 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3225 enum machine_mode mode)
3227 const char *str = (const char *) data;
3229 gcc_assert (offset >= 0
3230 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3231 <= strlen (str) + 1));
3233 return c_readstr (str + offset, mode);
3236 /* Expand a call EXP to the memcpy builtin.
3237 Return NULL_RTX if we failed, the caller should emit a normal call,
3238 otherwise try to get the result in TARGET, if convenient (and in
3239 mode MODE if that's convenient). */
3241 static rtx
3242 expand_builtin_memcpy (tree exp, rtx target)
3244 if (!validate_arglist (exp,
3245 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3246 return NULL_RTX;
3247 else
3249 tree dest = CALL_EXPR_ARG (exp, 0);
3250 tree src = CALL_EXPR_ARG (exp, 1);
3251 tree len = CALL_EXPR_ARG (exp, 2);
3252 const char *src_str;
3253 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3254 unsigned int dest_align
3255 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3256 rtx dest_mem, src_mem, dest_addr, len_rtx;
3257 HOST_WIDE_INT expected_size = -1;
3258 unsigned int expected_align = 0;
3260 /* If DEST is not a pointer type, call the normal function. */
3261 if (dest_align == 0)
3262 return NULL_RTX;
3264 /* If either SRC is not a pointer type, don't do this
3265 operation in-line. */
3266 if (src_align == 0)
3267 return NULL_RTX;
3269 if (currently_expanding_gimple_stmt)
3270 stringop_block_profile (currently_expanding_gimple_stmt,
3271 &expected_align, &expected_size);
3273 if (expected_align < dest_align)
3274 expected_align = dest_align;
3275 dest_mem = get_memory_rtx (dest, len);
3276 set_mem_align (dest_mem, dest_align);
3277 len_rtx = expand_normal (len);
3278 src_str = c_getstr (src);
3280 /* If SRC is a string constant and block move would be done
3281 by pieces, we can avoid loading the string from memory
3282 and only stored the computed constants. */
3283 if (src_str
3284 && CONST_INT_P (len_rtx)
3285 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3286 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3287 CONST_CAST (char *, src_str),
3288 dest_align, false))
3290 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3291 builtin_memcpy_read_str,
3292 CONST_CAST (char *, src_str),
3293 dest_align, false, 0);
3294 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3295 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3296 return dest_mem;
3299 src_mem = get_memory_rtx (src, len);
3300 set_mem_align (src_mem, src_align);
3302 /* Copy word part most expediently. */
3303 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3304 CALL_EXPR_TAILCALL (exp)
3305 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3306 expected_align, expected_size);
3308 if (dest_addr == 0)
3310 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3311 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3313 return dest_addr;
3317 /* Expand a call EXP to the mempcpy builtin.
3318 Return NULL_RTX if we failed; the caller should emit a normal call,
3319 otherwise try to get the result in TARGET, if convenient (and in
3320 mode MODE if that's convenient). If ENDP is 0 return the
3321 destination pointer, if ENDP is 1 return the end pointer ala
3322 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3323 stpcpy. */
3325 static rtx
3326 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3328 if (!validate_arglist (exp,
3329 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3331 else
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 1);
3335 tree len = CALL_EXPR_ARG (exp, 2);
3336 return expand_builtin_mempcpy_args (dest, src, len,
3337 target, mode, /*endp=*/ 1);
3341 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3342 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3343 so that this can also be called without constructing an actual CALL_EXPR.
3344 The other arguments and return value are the same as for
3345 expand_builtin_mempcpy. */
3347 static rtx
3348 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3349 rtx target, enum machine_mode mode, int endp)
3351 /* If return value is ignored, transform mempcpy into memcpy. */
3352 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_MEMCPY])
3354 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3355 tree result = build_call_nofold (fn, 3, dest, src, len);
3356 return expand_expr (result, target, mode, EXPAND_NORMAL);
3358 else
3360 const char *src_str;
3361 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3362 unsigned int dest_align
3363 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3364 rtx dest_mem, src_mem, len_rtx;
3366 /* If either SRC or DEST is not a pointer type, don't do this
3367 operation in-line. */
3368 if (dest_align == 0 || src_align == 0)
3369 return NULL_RTX;
3371 /* If LEN is not constant, call the normal function. */
3372 if (! host_integerp (len, 1))
3373 return NULL_RTX;
3375 len_rtx = expand_normal (len);
3376 src_str = c_getstr (src);
3378 /* If SRC is a string constant and block move would be done
3379 by pieces, we can avoid loading the string from memory
3380 and only stored the computed constants. */
3381 if (src_str
3382 && CONST_INT_P (len_rtx)
3383 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3384 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3385 CONST_CAST (char *, src_str),
3386 dest_align, false))
3388 dest_mem = get_memory_rtx (dest, len);
3389 set_mem_align (dest_mem, dest_align);
3390 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3391 builtin_memcpy_read_str,
3392 CONST_CAST (char *, src_str),
3393 dest_align, false, endp);
3394 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3395 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3396 return dest_mem;
3399 if (CONST_INT_P (len_rtx)
3400 && can_move_by_pieces (INTVAL (len_rtx),
3401 MIN (dest_align, src_align)))
3403 dest_mem = get_memory_rtx (dest, len);
3404 set_mem_align (dest_mem, dest_align);
3405 src_mem = get_memory_rtx (src, len);
3406 set_mem_align (src_mem, src_align);
3407 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3408 MIN (dest_align, src_align), endp);
3409 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3410 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3411 return dest_mem;
3414 return NULL_RTX;
3418 #ifndef HAVE_movstr
3419 # define HAVE_movstr 0
3420 # define CODE_FOR_movstr CODE_FOR_nothing
3421 #endif
3423 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3424 we failed, the caller should emit a normal call, otherwise try to
3425 get the result in TARGET, if convenient. If ENDP is 0 return the
3426 destination pointer, if ENDP is 1 return the end pointer ala
3427 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3428 stpcpy. */
3430 static rtx
3431 expand_movstr (tree dest, tree src, rtx target, int endp)
3433 rtx end;
3434 rtx dest_mem;
3435 rtx src_mem;
3436 rtx insn;
3437 const struct insn_data * data;
3439 if (!HAVE_movstr)
3440 return NULL_RTX;
3442 dest_mem = get_memory_rtx (dest, NULL);
3443 src_mem = get_memory_rtx (src, NULL);
3444 if (!endp)
3446 target = force_reg (Pmode, XEXP (dest_mem, 0));
3447 dest_mem = replace_equiv_address (dest_mem, target);
3448 end = gen_reg_rtx (Pmode);
3450 else
3452 if (target == 0 || target == const0_rtx)
3454 end = gen_reg_rtx (Pmode);
3455 if (target == 0)
3456 target = end;
3458 else
3459 end = target;
3462 data = insn_data + CODE_FOR_movstr;
3464 if (data->operand[0].mode != VOIDmode)
3465 end = gen_lowpart (data->operand[0].mode, end);
3467 insn = data->genfun (end, dest_mem, src_mem);
3469 gcc_assert (insn);
3471 emit_insn (insn);
3473 /* movstr is supposed to set end to the address of the NUL
3474 terminator. If the caller requested a mempcpy-like return value,
3475 adjust it. */
3476 if (endp == 1 && target != const0_rtx)
3478 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3479 emit_move_insn (target, force_operand (tem, NULL_RTX));
3482 return target;
3485 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3486 NULL_RTX if we failed the caller should emit a normal call, otherwise
3487 try to get the result in TARGET, if convenient (and in mode MODE if that's
3488 convenient). */
3490 static rtx
3491 expand_builtin_strcpy (tree exp, rtx target)
3493 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3495 tree dest = CALL_EXPR_ARG (exp, 0);
3496 tree src = CALL_EXPR_ARG (exp, 1);
3497 return expand_builtin_strcpy_args (dest, src, target);
3499 return NULL_RTX;
3502 /* Helper function to do the actual work for expand_builtin_strcpy. The
3503 arguments to the builtin_strcpy call DEST and SRC are broken out
3504 so that this can also be called without constructing an actual CALL_EXPR.
3505 The other arguments and return value are the same as for
3506 expand_builtin_strcpy. */
3508 static rtx
3509 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3511 return expand_movstr (dest, src, target, /*endp=*/0);
3514 /* Expand a call EXP to the stpcpy builtin.
3515 Return NULL_RTX if we failed the caller should emit a normal call,
3516 otherwise try to get the result in TARGET, if convenient (and in
3517 mode MODE if that's convenient). */
3519 static rtx
3520 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3522 tree dst, src;
3523 location_t loc = EXPR_LOCATION (exp);
3525 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3528 dst = CALL_EXPR_ARG (exp, 0);
3529 src = CALL_EXPR_ARG (exp, 1);
3531 /* If return value is ignored, transform stpcpy into strcpy. */
3532 if (target == const0_rtx && implicit_built_in_decls[BUILT_IN_STRCPY])
3534 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3535 tree result = build_call_nofold (fn, 2, dst, src);
3536 return expand_expr (result, target, mode, EXPAND_NORMAL);
3538 else
3540 tree len, lenp1;
3541 rtx ret;
3543 /* Ensure we get an actual string whose length can be evaluated at
3544 compile-time, not an expression containing a string. This is
3545 because the latter will potentially produce pessimized code
3546 when used to produce the return value. */
3547 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3548 return expand_movstr (dst, src, target, /*endp=*/2);
3550 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3551 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3552 target, mode, /*endp=*/2);
3554 if (ret)
3555 return ret;
3557 if (TREE_CODE (len) == INTEGER_CST)
3559 rtx len_rtx = expand_normal (len);
3561 if (CONST_INT_P (len_rtx))
3563 ret = expand_builtin_strcpy_args (dst, src, target);
3565 if (ret)
3567 if (! target)
3569 if (mode != VOIDmode)
3570 target = gen_reg_rtx (mode);
3571 else
3572 target = gen_reg_rtx (GET_MODE (ret));
3574 if (GET_MODE (target) != GET_MODE (ret))
3575 ret = gen_lowpart (GET_MODE (target), ret);
3577 ret = plus_constant (ret, INTVAL (len_rtx));
3578 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3579 gcc_assert (ret);
3581 return target;
3586 return expand_movstr (dst, src, target, /*endp=*/2);
3590 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3591 bytes from constant string DATA + OFFSET and return it as target
3592 constant. */
3595 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3596 enum machine_mode mode)
3598 const char *str = (const char *) data;
3600 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3601 return const0_rtx;
3603 return c_readstr (str + offset, mode);
3606 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3607 NULL_RTX if we failed the caller should emit a normal call. */
3609 static rtx
3610 expand_builtin_strncpy (tree exp, rtx target)
3612 location_t loc = EXPR_LOCATION (exp);
3614 if (validate_arglist (exp,
3615 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3617 tree dest = CALL_EXPR_ARG (exp, 0);
3618 tree src = CALL_EXPR_ARG (exp, 1);
3619 tree len = CALL_EXPR_ARG (exp, 2);
3620 tree slen = c_strlen (src, 1);
3622 /* We must be passed a constant len and src parameter. */
3623 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3624 return NULL_RTX;
3626 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3628 /* We're required to pad with trailing zeros if the requested
3629 len is greater than strlen(s2)+1. In that case try to
3630 use store_by_pieces, if it fails, punt. */
3631 if (tree_int_cst_lt (slen, len))
3633 unsigned int dest_align
3634 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3635 const char *p = c_getstr (src);
3636 rtx dest_mem;
3638 if (!p || dest_align == 0 || !host_integerp (len, 1)
3639 || !can_store_by_pieces (tree_low_cst (len, 1),
3640 builtin_strncpy_read_str,
3641 CONST_CAST (char *, p),
3642 dest_align, false))
3643 return NULL_RTX;
3645 dest_mem = get_memory_rtx (dest, len);
3646 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3647 builtin_strncpy_read_str,
3648 CONST_CAST (char *, p), dest_align, false, 0);
3649 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3650 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3651 return dest_mem;
3654 return NULL_RTX;
3657 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3658 bytes from constant string DATA + OFFSET and return it as target
3659 constant. */
3662 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3663 enum machine_mode mode)
3665 const char *c = (const char *) data;
3666 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3668 memset (p, *c, GET_MODE_SIZE (mode));
3670 return c_readstr (p, mode);
3673 /* Callback routine for store_by_pieces. Return the RTL of a register
3674 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3675 char value given in the RTL register data. For example, if mode is
3676 4 bytes wide, return the RTL for 0x01010101*data. */
3678 static rtx
3679 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3680 enum machine_mode mode)
3682 rtx target, coeff;
3683 size_t size;
3684 char *p;
3686 size = GET_MODE_SIZE (mode);
3687 if (size == 1)
3688 return (rtx) data;
3690 p = XALLOCAVEC (char, size);
3691 memset (p, 1, size);
3692 coeff = c_readstr (p, mode);
3694 target = convert_to_mode (mode, (rtx) data, 1);
3695 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3696 return force_reg (mode, target);
3699 /* Expand expression EXP, which is a call to the memset builtin. Return
3700 NULL_RTX if we failed the caller should emit a normal call, otherwise
3701 try to get the result in TARGET, if convenient (and in mode MODE if that's
3702 convenient). */
3704 static rtx
3705 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3707 if (!validate_arglist (exp,
3708 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3709 return NULL_RTX;
3710 else
3712 tree dest = CALL_EXPR_ARG (exp, 0);
3713 tree val = CALL_EXPR_ARG (exp, 1);
3714 tree len = CALL_EXPR_ARG (exp, 2);
3715 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is the original call, used to pick
   the library fallback (memset or bzero) and its tail-call flag.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, enum machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-derived hints for the block operation.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  /* Non-constant fill value: replicate it at runtime.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
				 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
			       val_rtx);
	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value: reduce it to a host char, or punt.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Non-zero constant byte: store by pieces or via the setmem
     pattern.  (A zero byte falls through to clear_storage below.)  */
  if (c)
    {
      if (host_integerp (len, 1)
	  && can_store_by_pieces (tree_low_cst (len, 1),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_low_cst (len, 1),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
					dest_align, expected_align,
					expected_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: clear the storage.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Fall back to a library call, preserving the original builtin
     (memset or bzero) and its tail-call disposition.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold (fndecl, 2, dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3848 /* Expand expression EXP, which is a call to the bzero builtin. Return
3849 NULL_RTX if we failed the caller should emit a normal call. */
3851 static rtx
3852 expand_builtin_bzero (tree exp)
3854 tree dest, size;
3855 location_t loc = EXPR_LOCATION (exp);
3857 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3858 return NULL_RTX;
3860 dest = CALL_EXPR_ARG (exp, 0);
3861 size = CALL_EXPR_ARG (exp, 1);
3863 /* New argument list transforming bzero(ptr x, int y) to
3864 memset(ptr x, int 0, size_t y). This is done this way
3865 so that if it isn't expanded inline, we fallback to
3866 calling bzero instead of memset. */
3868 return expand_builtin_memset_args (dest, integer_zero_node,
3869 fold_convert_loc (loc, sizetype, size),
3870 const0_rtx, VOIDmode, exp);
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient (and in mode MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode;

    /* Prefer the dedicated cmpmemsi pattern; fall back to cmpstrnsi.
       Note the dangling else-chain across the #ifdefs: if neither
       pattern exists at all we fall through to the return below.  */
#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
    else
#endif
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, arg3_rtx);
	set_mem_size (arg2_rtx, arg3_rtx);
      }

#ifdef HAVE_cmpmemsi
    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
#ifdef HAVE_cmpstrnsi
    if (HAVE_cmpstrnsi)
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
    else
#endif
      gcc_unreachable ();

    /* The gen_* functions may return NULL if the operands failed their
       predicates; in that case fall back to a library call to memcmp.  */
    if (insn)
      emit_insn (insn);
    else
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (cmpstr_optab[SImode] != CODE_FOR_nothing
      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.
	 The saved trees are reused for the libcall fallback below.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  enum machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  /* lenN is strlen (argN) + 1 when known at compile time.  */
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  enum machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin. Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED enum machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
#ifdef HAVE_cmpstrnsi
  if (HAVE_cmpstrnsi)
    {
      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result, insn;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      int arg1_align
	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int arg2_align
	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      enum machine_mode insn_mode
	= insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;

      /* lenN is strlen (argN) + 1 when known at compile time.  */
      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))
	return NULL_RTX;

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
			     fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && REG_P (result) && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Stabilize the arguments in case gen_cmpstrnsi fails.
	 The saved trees are reused for the libcall fallback below.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			    GEN_INT (MIN (arg1_align, arg2_align)));
      if (insn)
	{
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold (fndecl, 3, arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
4244 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4245 if that's convenient. */
4248 expand_builtin_saveregs (void)
4250 rtx val, seq;
4252 /* Don't do __builtin_saveregs more than once in a function.
4253 Save the result of the first call and reuse it. */
4254 if (saveregs_value != 0)
4255 return saveregs_value;
4257 /* When this function is called, it means that registers must be
4258 saved on entry to this function. So we migrate the call to the
4259 first insn of this function. */
4261 start_sequence ();
4263 /* Do whatever the machine needs done in this case. */
4264 val = targetm.calls.expand_builtin_saveregs ();
4266 seq = get_insns ();
4267 end_sequence ();
4269 saveregs_value = val;
4271 /* Put the insns after the NOTE that starts the function. If this
4272 is inside a start_sequence, make the outer-level insn chain current, so
4273 the code is placed at the start of the function. */
4274 push_topmost_sequence ();
4275 emit_insn_after (seq, entry_of_function ());
4276 pop_topmost_sequence ();
4278 return val;
4281 /* __builtin_args_info (N) returns word N of the arg space info
4282 for the current function. The number and meanings of words
4283 is controlled by the definition of CUMULATIVE_ARGS. */
4285 static rtx
4286 expand_builtin_args_info (tree exp)
4288 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4289 int *word_ptr = (int *) &crtl->args.info;
4291 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4293 if (call_expr_nargs (exp) != 0)
4295 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4296 error ("argument of %<__builtin_args_info%> must be constant");
4297 else
4299 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4301 if (wordnum < 0 || wordnum >= nwords)
4302 error ("argument of %<__builtin_args_info%> out of range");
4303 else
4304 return GEN_INT (word_ptr[wordnum]);
4307 else
4308 error ("missing argument in %<__builtin_args_info%>");
4310 return const0_rtx;
4313 /* Expand a call to __builtin_next_arg. */
4315 static rtx
4316 expand_builtin_next_arg (void)
4318 /* Checking arguments is already done in fold_builtin_next_arg
4319 that must be called before this function. */
4320 return expand_binop (ptr_mode, add_optab,
4321 crtl->args.internal_arg_pointer,
4322 crtl->args.arg_offset_rtx,
4323 NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  Returns a tree equivalent to VALIST that
   is safe to evaluate more than once; when NEEDS_LVALUE is set, the
   result is also usable as an lvalue.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  gcc_assert (vatype != NULL_TREE);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt;

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Take the address so the side effects are evaluated once,
	     then dereference below.  */
	  pt = build_pointer_type (vatype);
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = build_fold_indirect_ref_loc (loc, valist);
    }

  return valist;
}
4373 /* The "standard" definition of va_list is void*. */
4375 tree
4376 std_build_builtin_va_list (void)
4378 return ptr_type_node;
4381 /* The "standard" abi va_list is va_list_type_node. */
4383 tree
4384 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4386 return va_list_type_node;
/* The "standard" type of va_list is va_list_type_node.  Returns
   va_list_type_node if TYPE is (possibly an indirection to) the va_list
   type, and NULL_TREE otherwise.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  /* Strip one level of indirection from the incoming type.  */
  if (INDIRECT_REF_P (type))
    type = TREE_TYPE (type);
  else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
    type = TREE_TYPE (type);
  wtype = va_list_type_node;
  htype = type;
  /* Treat structure va_list types.  */
  if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
    htype = TREE_TYPE (htype);
  else if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
4424 /* The "standard" implementation of va_start: just assign `nextarg' to
4425 the variable. */
4427 void
4428 std_expand_builtin_va_start (tree valist, rtx nextarg)
4430 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4431 convert_move (va_r, nextarg, 0);
4434 /* Expand EXP, a call to __builtin_va_start. */
4436 static rtx
4437 expand_builtin_va_start (tree exp)
4439 rtx nextarg;
4440 tree valist;
4441 location_t loc = EXPR_LOCATION (exp);
4443 if (call_expr_nargs (exp) < 2)
4445 error_at (loc, "too few arguments to function %<va_start%>");
4446 return const0_rtx;
4449 if (fold_builtin_next_arg (exp, true))
4450 return const0_rtx;
4452 nextarg = expand_builtin_next_arg ();
4453 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4455 if (targetm.expand_builtin_va_start)
4456 targetm.expand_builtin_va_start (valist, nextarg);
4457 else
4458 std_expand_builtin_va_start (valist, nextarg);
4460 return const0_rtx;
/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  The
   resulting tree is returned; setup statements are appended to PRE_P
   and POST_P.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

#ifdef ARGS_GROW_DOWNWARD
  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  gcc_unreachable ();
#endif

  /* Arguments passed by reference are fetched as a pointer and
     dereferenced at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = valist_tmp + (boundary - 1) */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (POINTER_PLUS_EXPR,
			       TREE_TYPE (valist),
			       valist_tmp, size_int (boundary - 1)));
      gimplify_and_add (t, pre_p);

      /* valist_tmp = valist_tmp & -boundary (round down to boundary) */
      t = fold_convert (sizetype, valist_tmp);
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_convert (TREE_TYPE (valist),
				fold_build2 (BIT_AND_EXPR, sizetype, t,
					     size_int (-boundary))));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build2 (POINTER_PLUS_EXPR,
			  TREE_TYPE (addr), addr, t);
    }

  /* Compute new value for AP.  */
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* One extra dereference for by-reference arguments.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
4564 /* Build an indirect-ref expression over the given TREE, which represents a
4565 piece of a va_arg() expansion. */
4566 tree
4567 build_va_arg_indirect_ref (tree addr)
4569 addr = build_fold_indirect_ref_loc (EXPR_LOCATION (addr), addr);
4571 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4572 mf_mark (addr);
4574 return addr;
4577 /* Return a dummy expression of type TYPE in order to keep going after an
4578 error. */
4580 static tree
4581 dummy_object (tree type)
4583 tree t = build_int_cst (build_pointer_type (type), 0);
4584 return build1 (INDIRECT_REF, type, t);
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  Returns the
   usual gimplify status codes; on success *EXPR_P is replaced by the
   expansion of the va_arg.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* Only give the hint once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       implicit_built_in_decls[BUILT_IN_TRAP], 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
4680 /* Expand EXP, a call to __builtin_va_end. */
4682 static rtx
4683 expand_builtin_va_end (tree exp)
4685 tree valist = CALL_EXPR_ARG (exp, 0);
4687 /* Evaluate for side effects, if needed. I hate macros that don't
4688 do that. */
4689 if (TREE_SIDE_EFFECTS (valist))
4690 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4692 return const0_rtx;
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination needs an lvalue; the source does not.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; diagnostics
   are worded accordingly.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is the return address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
    {
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	error ("invalid argument to %<__builtin_frame_address%>");
      else
	error ("invalid argument to %<__builtin_return_address%>");
      return const0_rtx;
    }
  else
    {
      rtx tem
	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
				      tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
	  else
	    warning (0, "unsupported argument to %<__builtin_return_address%>");
	  return const0_rtx;
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, copy hard registers and other
	 non-constant values into a pseudo before returning.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_to_mode_reg (Pmode, tem);
      return tem;
    }
}
4794 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4795 we failed and the caller should emit a normal call, otherwise try to get
4796 the result in TARGET, if convenient. */
4798 static rtx
4799 expand_builtin_alloca (tree exp, rtx target)
4801 rtx op0;
4802 rtx result;
4804 /* Emit normal call if marked not-inlineable. */
4805 if (CALL_CANNOT_INLINE_P (exp))
4806 return NULL_RTX;
4808 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4809 return NULL_RTX;
4811 /* Compute the argument. */
4812 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4814 /* Allocate the desired space. */
4815 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4816 result = convert_memory_address (ptr_mode, result);
4818 return result;
4821 /* Expand a call to a bswap builtin with argument ARG0. MODE
4822 is the mode to expand with. */
4824 static rtx
4825 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
4827 enum machine_mode mode;
4828 tree arg;
4829 rtx op0;
4831 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4832 return NULL_RTX;
4834 arg = CALL_EXPR_ARG (exp, 0);
4835 mode = TYPE_MODE (TREE_TYPE (arg));
4836 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4838 target = expand_unop (mode, bswap_optab, op0, target, 1);
4840 gcc_assert (target);
4842 return convert_to_mode (mode, target, 0);
4845 /* Expand a call to a unary builtin in EXP.
4846 Return NULL_RTX if a normal call should be emitted rather than expanding the
4847 function in-line. If convenient, the result should be placed in TARGET.
4848 SUBTARGET may be used as the target for computing one of EXP's operands. */
4850 static rtx
4851 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4852 rtx subtarget, optab op_optab)
4854 rtx op0;
4856 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4857 return NULL_RTX;
4859 /* Compute the argument. */
4860 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4861 VOIDmode, EXPAND_NORMAL);
4862 /* Compute op, into TARGET if possible.
4863 Set TARGET to wherever the result comes back. */
4864 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4865 op_optab, op0, target, 1);
4866 gcc_assert (target);
4868 return convert_to_mode (target_mode, target, 0);
4871 /* Expand a call to __builtin_expect. We just return our argument
4872 as the builtin_expect semantic should've been already executed by
4873 tree branch prediction pass. */
4875 static rtx
4876 expand_builtin_expect (tree exp, rtx target)
4878 tree arg;
4880 if (call_expr_nargs (exp) < 2)
4881 return const0_rtx;
4882 arg = CALL_EXPR_ARG (exp, 0);
4884 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4885 /* When guessing was done, the hints should be already stripped away. */
4886 gcc_assert (!flag_guess_branch_prob
4887 || optimize == 0 || errorcount || sorrycount);
4888 return target;
/* Expand a call to __builtin_trap: emit the target's trap insn if it has
   one, otherwise a call to abort, followed by a barrier in either case.  */

void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    /* Falls under the dangling else above when HAVE_trap is defined
       but false at run time.  */
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
  emit_barrier ();
}
/* Expand a call to __builtin_unreachable.  Nothing is generated except a
   barrier noting that control flow never passes this point.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* Stabilize the argument and store the SAVE_EXPR back into the
     CALL_EXPR so any later re-expansion sees the same stabilized tree.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
4937 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4938 Return NULL is a normal call should be emitted rather than expanding the
4939 function inline. If convenient, the result should be placed in TARGET.
4940 SUBTARGET may be used as the target for computing the operand. */
4942 static rtx
4943 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4945 rtx op0, op1;
4946 tree arg;
4948 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4949 return NULL_RTX;
4951 arg = CALL_EXPR_ARG (exp, 0);
4952 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4954 arg = CALL_EXPR_ARG (exp, 1);
4955 op1 = expand_normal (arg);
4957 return expand_copysign (op0, op1, target);
4960 /* Create a new constant string literal and return a char* pointer to it.
4961 The STRING_CST value is the LEN characters at STR. */
4962 tree
4963 build_string_literal (int len, const char *str)
4965 tree t, elem, index, type;
4967 t = build_string (len, str);
4968 elem = build_type_variant (char_type_node, 1, 0);
4969 index = build_index_type (size_int (len - 1));
4970 type = build_array_type (elem, index);
4971 TREE_TYPE (t) = type;
4972 TREE_CONSTANT (t) = 1;
4973 TREE_READONLY (t) = 1;
4974 TREE_STATIC (t) = 1;
4976 type = build_pointer_type (elem);
4977 t = build1 (ADDR_EXPR, type,
4978 build4 (ARRAY_REF, elem,
4979 t, integer_zero_node, NULL_TREE, NULL_TREE));
4980 return t;
4983 /* Expand a call to either the entry or exit function profiler. */
4985 static rtx
4986 expand_builtin_profile_func (bool exitp)
4988 rtx this_rtx, which;
4990 this_rtx = DECL_RTL (current_function_decl);
4991 gcc_assert (MEM_P (this_rtx));
4992 this_rtx = XEXP (this_rtx, 0);
4994 if (exitp)
4995 which = profile_function_exit_libfunc;
4996 else
4997 which = profile_function_entry_libfunc;
4999 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this_rtx, Pmode,
5000 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5002 Pmode);
5004 return const0_rtx;
5007 /* Expand a call to __builtin___clear_cache. */
5009 static rtx
5010 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5012 #ifndef HAVE_clear_cache
5013 #ifdef CLEAR_INSN_CACHE
5014 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5015 does something. Just do the default expansion to a call to
5016 __clear_cache(). */
5017 return NULL_RTX;
5018 #else
5019 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5020 does nothing. There is no need to call it. Do nothing. */
5021 return const0_rtx;
5022 #endif /* CLEAR_INSN_CACHE */
5023 #else
5024 /* We have a "clear_cache" insn, and it will handle everything. */
5025 tree begin, end;
5026 rtx begin_rtx, end_rtx;
5027 enum insn_code icode;
5029 /* We must not expand to a library call. If we did, any
5030 fallback library function in libgcc that might contain a call to
5031 __builtin___clear_cache() would recurse infinitely. */
5032 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5034 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5035 return const0_rtx;
5038 if (HAVE_clear_cache)
5040 icode = CODE_FOR_clear_cache;
5042 begin = CALL_EXPR_ARG (exp, 0);
5043 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5044 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5045 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5046 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5048 end = CALL_EXPR_ARG (exp, 1);
5049 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5050 end_rtx = convert_memory_address (Pmode, end_rtx);
5051 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5052 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5054 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5056 return const0_rtx;
5057 #endif /* HAVE_clear_cache */
5060 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5062 static rtx
5063 round_trampoline_addr (rtx tramp)
5065 rtx temp, addend, mask;
5067 /* If we don't need too much alignment, we'll have been guaranteed
5068 proper alignment by get_trampoline_type. */
5069 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5070 return tramp;
5072 /* Round address up to desired boundary. */
5073 temp = gen_reg_rtx (Pmode);
5074 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5075 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5077 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5078 temp, 0, OPTAB_LIB_WIDEN);
5079 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5080 temp, 0, OPTAB_LIB_WIDEN);
5082 return tramp;
5085 static rtx
5086 expand_builtin_init_trampoline (tree exp)
5088 tree t_tramp, t_func, t_chain;
5089 rtx m_tramp, r_tramp, r_chain, tmp;
5091 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5092 POINTER_TYPE, VOID_TYPE))
5093 return NULL_RTX;
5095 t_tramp = CALL_EXPR_ARG (exp, 0);
5096 t_func = CALL_EXPR_ARG (exp, 1);
5097 t_chain = CALL_EXPR_ARG (exp, 2);
5099 r_tramp = expand_normal (t_tramp);
5100 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5101 MEM_NOTRAP_P (m_tramp) = 1;
5103 /* The TRAMP argument should be the address of a field within the
5104 local function's FRAME decl. Let's see if we can fill in the
5105 to fill in the MEM_ATTRs for this memory. */
5106 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5107 set_mem_attributes_minus_bitpos (m_tramp, TREE_OPERAND (t_tramp, 0),
5108 true, 0);
5110 tmp = round_trampoline_addr (r_tramp);
5111 if (tmp != r_tramp)
5113 m_tramp = change_address (m_tramp, BLKmode, tmp);
5114 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5115 set_mem_size (m_tramp, GEN_INT (TRAMPOLINE_SIZE));
5118 /* The FUNC argument should be the address of the nested function.
5119 Extract the actual function decl to pass to the hook. */
5120 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5121 t_func = TREE_OPERAND (t_func, 0);
5122 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5124 r_chain = expand_normal (t_chain);
5126 /* Generate insns to initialize the trampoline. */
5127 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5129 trampolines_created = 1;
5130 return const0_rtx;
5133 static rtx
5134 expand_builtin_adjust_trampoline (tree exp)
5136 rtx tramp;
5138 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5139 return NULL_RTX;
5141 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5142 tramp = round_trampoline_addr (tramp);
5143 if (targetm.calls.trampoline_adjust_address)
5144 tramp = targetm.calls.trampoline_adjust_address (tramp);
5146 return tramp;
5149 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5150 function. The function first checks whether the back end provides
5151 an insn to implement signbit for the respective mode. If not, it
5152 checks whether the floating point format of the value is such that
5153 the sign bit can be extracted. If that is not the case, the
5154 function returns NULL_RTX to indicate that a normal call should be
5155 emitted rather than expanding the function in-line. EXP is the
5156 expression that is a call to the builtin function; if convenient,
5157 the result should be placed in TARGET. */
5158 static rtx
5159 expand_builtin_signbit (tree exp, rtx target)
5161 const struct real_format *fmt;
5162 enum machine_mode fmode, imode, rmode;
5163 HOST_WIDE_INT hi, lo;
5164 tree arg;
5165 int word, bitpos;
5166 enum insn_code icode;
5167 rtx temp;
5168 location_t loc = EXPR_LOCATION (exp);
5170 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5171 return NULL_RTX;
5173 arg = CALL_EXPR_ARG (exp, 0);
5174 fmode = TYPE_MODE (TREE_TYPE (arg));
5175 rmode = TYPE_MODE (TREE_TYPE (exp));
5176 fmt = REAL_MODE_FORMAT (fmode);
5178 arg = builtin_save_expr (arg);
5180 /* Expand the argument yielding a RTX expression. */
5181 temp = expand_normal (arg);
5183 /* Check if the back end provides an insn that handles signbit for the
5184 argument's mode. */
5185 icode = signbit_optab->handlers [(int) fmode].insn_code;
5186 if (icode != CODE_FOR_nothing)
5188 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5189 emit_unop_insn (icode, target, temp, UNKNOWN);
5190 return target;
5193 /* For floating point formats without a sign bit, implement signbit
5194 as "ARG < 0.0". */
5195 bitpos = fmt->signbit_ro;
5196 if (bitpos < 0)
5198 /* But we can't do this if the format supports signed zero. */
5199 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5200 return NULL_RTX;
5202 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5203 build_real (TREE_TYPE (arg), dconst0));
5204 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5207 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5209 imode = int_mode_for_mode (fmode);
5210 if (imode == BLKmode)
5211 return NULL_RTX;
5212 temp = gen_lowpart (imode, temp);
5214 else
5216 imode = word_mode;
5217 /* Handle targets with different FP word orders. */
5218 if (FLOAT_WORDS_BIG_ENDIAN)
5219 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5220 else
5221 word = bitpos / BITS_PER_WORD;
5222 temp = operand_subword_force (temp, word, fmode);
5223 bitpos = bitpos % BITS_PER_WORD;
5226 /* Force the intermediate word_mode (or narrower) result into a
5227 register. This avoids attempting to create paradoxical SUBREGs
5228 of floating point modes below. */
5229 temp = force_reg (imode, temp);
5231 /* If the bitpos is within the "result mode" lowpart, the operation
5232 can be implement with a single bitwise AND. Otherwise, we need
5233 a right shift and an AND. */
5235 if (bitpos < GET_MODE_BITSIZE (rmode))
5237 if (bitpos < HOST_BITS_PER_WIDE_INT)
5239 hi = 0;
5240 lo = (HOST_WIDE_INT) 1 << bitpos;
5242 else
5244 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5245 lo = 0;
5248 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5249 temp = gen_lowpart (rmode, temp);
5250 temp = expand_binop (rmode, and_optab, temp,
5251 immed_double_const (lo, hi, rmode),
5252 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5254 else
5256 /* Perform a logical right shift to place the signbit in the least
5257 significant bit, then truncate the result to the desired mode
5258 and mask just this bit. */
5259 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5260 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5261 temp = gen_lowpart (rmode, temp);
5262 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5263 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5266 return temp;
5269 /* Expand fork or exec calls. TARGET is the desired target of the
5270 call. EXP is the call. FN is the
5271 identificator of the actual function. IGNORE is nonzero if the
5272 value is to be ignored. */
5274 static rtx
5275 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5277 tree id, decl;
5278 tree call;
5280 /* If we are not profiling, just call the function. */
5281 if (!profile_arc_flag)
5282 return NULL_RTX;
5284 /* Otherwise call the wrapper. This should be equivalent for the rest of
5285 compiler, so the code does not diverge, and the wrapper may run the
5286 code necessary for keeping the profiling sane. */
5288 switch (DECL_FUNCTION_CODE (fn))
5290 case BUILT_IN_FORK:
5291 id = get_identifier ("__gcov_fork");
5292 break;
5294 case BUILT_IN_EXECL:
5295 id = get_identifier ("__gcov_execl");
5296 break;
5298 case BUILT_IN_EXECV:
5299 id = get_identifier ("__gcov_execv");
5300 break;
5302 case BUILT_IN_EXECLP:
5303 id = get_identifier ("__gcov_execlp");
5304 break;
5306 case BUILT_IN_EXECLE:
5307 id = get_identifier ("__gcov_execle");
5308 break;
5310 case BUILT_IN_EXECVP:
5311 id = get_identifier ("__gcov_execvp");
5312 break;
5314 case BUILT_IN_EXECVE:
5315 id = get_identifier ("__gcov_execve");
5316 break;
5318 default:
5319 gcc_unreachable ();
5322 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5323 FUNCTION_DECL, id, TREE_TYPE (fn));
5324 DECL_EXTERNAL (decl) = 1;
5325 TREE_PUBLIC (decl) = 1;
5326 DECL_ARTIFICIAL (decl) = 1;
5327 TREE_NOTHROW (decl) = 1;
5328 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5329 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5330 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5331 return expand_call (call, target, ignore);
5336 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5337 the pointer in these functions is void*, the tree optimizers may remove
5338 casts. The mode computed in expand_builtin isn't reliable either, due
5339 to __sync_bool_compare_and_swap.
5341 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5342 group of builtins. This gives us log2 of the mode size. */
5344 static inline enum machine_mode
5345 get_builtin_sync_mode (int fcode_diff)
5347 /* The size is not negotiable, so ask not to get BLKmode in return
5348 if the target indicates that a smaller size would be better. */
5349 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5352 /* Expand the memory expression LOC and return the appropriate memory operand
5353 for the builtin_sync operations. */
5355 static rtx
5356 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5358 rtx addr, mem;
5360 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5361 addr = convert_memory_address (Pmode, addr);
5363 /* Note that we explicitly do not want any alias information for this
5364 memory, so that we kill all other live memories. Otherwise we don't
5365 satisfy the full barrier semantics of the intrinsic. */
5366 mem = validize_mem (gen_rtx_MEM (mode, addr));
5368 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5369 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5370 MEM_VOLATILE_P (mem) = 1;
5372 return mem;
5375 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5376 EXP is the CALL_EXPR. CODE is the rtx code
5377 that corresponds to the arithmetic or logical operation from the name;
5378 an exception here is that NOT actually means NAND. TARGET is an optional
5379 place for us to store the results; AFTER is true if this is the
5380 fetch_and_xxx form. IGNORE is true if we don't actually care about
5381 the result of the operation at all. */
5383 static rtx
5384 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5385 enum rtx_code code, bool after,
5386 rtx target, bool ignore)
5388 rtx val, mem;
5389 enum machine_mode old_mode;
5390 location_t loc = EXPR_LOCATION (exp);
5392 if (code == NOT && warn_sync_nand)
5394 tree fndecl = get_callee_fndecl (exp);
5395 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5397 static bool warned_f_a_n, warned_n_a_f;
5399 switch (fcode)
5401 case BUILT_IN_FETCH_AND_NAND_1:
5402 case BUILT_IN_FETCH_AND_NAND_2:
5403 case BUILT_IN_FETCH_AND_NAND_4:
5404 case BUILT_IN_FETCH_AND_NAND_8:
5405 case BUILT_IN_FETCH_AND_NAND_16:
5407 if (warned_f_a_n)
5408 break;
5410 fndecl = implicit_built_in_decls[BUILT_IN_FETCH_AND_NAND_N];
5411 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5412 warned_f_a_n = true;
5413 break;
5415 case BUILT_IN_NAND_AND_FETCH_1:
5416 case BUILT_IN_NAND_AND_FETCH_2:
5417 case BUILT_IN_NAND_AND_FETCH_4:
5418 case BUILT_IN_NAND_AND_FETCH_8:
5419 case BUILT_IN_NAND_AND_FETCH_16:
5421 if (warned_n_a_f)
5422 break;
5424 fndecl = implicit_built_in_decls[BUILT_IN_NAND_AND_FETCH_N];
5425 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5426 warned_n_a_f = true;
5427 break;
5429 default:
5430 gcc_unreachable ();
5434 /* Expand the operands. */
5435 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5437 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5438 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5439 of CONST_INTs, where we know the old_mode only from the call argument. */
5440 old_mode = GET_MODE (val);
5441 if (old_mode == VOIDmode)
5442 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5443 val = convert_modes (mode, old_mode, val, 1);
5445 if (ignore)
5446 return expand_sync_operation (mem, val, code);
5447 else
5448 return expand_sync_fetch_operation (mem, val, code, after, target);
5451 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5452 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5453 true if this is the boolean form. TARGET is a place for us to store the
5454 results; this is NOT optional if IS_BOOL is true. */
5456 static rtx
5457 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5458 bool is_bool, rtx target)
5460 rtx old_val, new_val, mem;
5461 enum machine_mode old_mode;
5463 /* Expand the operands. */
5464 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5467 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5468 mode, EXPAND_NORMAL);
5469 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5470 of CONST_INTs, where we know the old_mode only from the call argument. */
5471 old_mode = GET_MODE (old_val);
5472 if (old_mode == VOIDmode)
5473 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5474 old_val = convert_modes (mode, old_mode, old_val, 1);
5476 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5477 mode, EXPAND_NORMAL);
5478 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5479 of CONST_INTs, where we know the old_mode only from the call argument. */
5480 old_mode = GET_MODE (new_val);
5481 if (old_mode == VOIDmode)
5482 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5483 new_val = convert_modes (mode, old_mode, new_val, 1);
5485 if (is_bool)
5486 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5487 else
5488 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5491 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5492 general form is actually an atomic exchange, and some targets only
5493 support a reduced form with the second argument being a constant 1.
5494 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5495 the results. */
5497 static rtx
5498 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5499 rtx target)
5501 rtx val, mem;
5502 enum machine_mode old_mode;
5504 /* Expand the operands. */
5505 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5506 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5507 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5508 of CONST_INTs, where we know the old_mode only from the call argument. */
5509 old_mode = GET_MODE (val);
5510 if (old_mode == VOIDmode)
5511 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5512 val = convert_modes (mode, old_mode, val, 1);
5514 return expand_sync_lock_test_and_set (mem, val, target);
5517 /* Expand the __sync_synchronize intrinsic. */
5519 static void
5520 expand_builtin_synchronize (void)
5522 gimple x;
5523 VEC (tree, gc) *v_clobbers;
5525 #ifdef HAVE_memory_barrier
5526 if (HAVE_memory_barrier)
5528 emit_insn (gen_memory_barrier ());
5529 return;
5531 #endif
5533 if (synchronize_libfunc != NULL_RTX)
5535 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
5536 return;
5539 /* If no explicit memory barrier instruction is available, create an
5540 empty asm stmt with a memory clobber. */
5541 v_clobbers = VEC_alloc (tree, gc, 1);
5542 VEC_quick_push (tree, v_clobbers,
5543 tree_cons (NULL, build_string (6, "memory"), NULL));
5544 x = gimple_build_asm_vec ("", NULL, NULL, v_clobbers, NULL);
5545 gimple_asm_set_volatile (x, true);
5546 expand_asm_stmt (x);
5549 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5551 static void
5552 expand_builtin_lock_release (enum machine_mode mode, tree exp)
5554 enum insn_code icode;
5555 rtx mem, insn;
5556 rtx val = const0_rtx;
5558 /* Expand the operands. */
5559 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5561 /* If there is an explicit operation in the md file, use it. */
5562 icode = sync_lock_release[mode];
5563 if (icode != CODE_FOR_nothing)
5565 if (!insn_data[icode].operand[1].predicate (val, mode))
5566 val = force_reg (mode, val);
5568 insn = GEN_FCN (icode) (mem, val);
5569 if (insn)
5571 emit_insn (insn);
5572 return;
5576 /* Otherwise we can implement this operation by emitting a barrier
5577 followed by a store of zero. */
5578 expand_builtin_synchronize ();
5579 emit_move_insn (mem, val);
5582 /* Expand an expression EXP that calls a built-in function,
5583 with result going to TARGET if that's convenient
5584 (and in mode MODE if that's convenient).
5585 SUBTARGET may be used as the target for computing one of EXP's operands.
5586 IGNORE is nonzero if the value is to be ignored. */
5589 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5590 int ignore)
5592 tree fndecl = get_callee_fndecl (exp);
5593 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5594 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5596 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5597 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5599 /* When not optimizing, generate calls to library functions for a certain
5600 set of builtins. */
5601 if (!optimize
5602 && !called_as_built_in (fndecl)
5603 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5604 && fcode != BUILT_IN_ALLOCA
5605 && fcode != BUILT_IN_FREE)
5606 return expand_call (exp, target, ignore);
5608 /* The built-in function expanders test for target == const0_rtx
5609 to determine whether the function's result will be ignored. */
5610 if (ignore)
5611 target = const0_rtx;
5613 /* If the result of a pure or const built-in function is ignored, and
5614 none of its arguments are volatile, we can avoid expanding the
5615 built-in call and just evaluate the arguments for side-effects. */
5616 if (target == const0_rtx
5617 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
5619 bool volatilep = false;
5620 tree arg;
5621 call_expr_arg_iterator iter;
5623 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5624 if (TREE_THIS_VOLATILE (arg))
5626 volatilep = true;
5627 break;
5630 if (! volatilep)
5632 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5633 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5634 return const0_rtx;
5638 switch (fcode)
5640 CASE_FLT_FN (BUILT_IN_FABS):
5641 target = expand_builtin_fabs (exp, target, subtarget);
5642 if (target)
5643 return target;
5644 break;
5646 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5647 target = expand_builtin_copysign (exp, target, subtarget);
5648 if (target)
5649 return target;
5650 break;
5652 /* Just do a normal library call if we were unable to fold
5653 the values. */
5654 CASE_FLT_FN (BUILT_IN_CABS):
5655 break;
5657 CASE_FLT_FN (BUILT_IN_EXP):
5658 CASE_FLT_FN (BUILT_IN_EXP10):
5659 CASE_FLT_FN (BUILT_IN_POW10):
5660 CASE_FLT_FN (BUILT_IN_EXP2):
5661 CASE_FLT_FN (BUILT_IN_EXPM1):
5662 CASE_FLT_FN (BUILT_IN_LOGB):
5663 CASE_FLT_FN (BUILT_IN_LOG):
5664 CASE_FLT_FN (BUILT_IN_LOG10):
5665 CASE_FLT_FN (BUILT_IN_LOG2):
5666 CASE_FLT_FN (BUILT_IN_LOG1P):
5667 CASE_FLT_FN (BUILT_IN_TAN):
5668 CASE_FLT_FN (BUILT_IN_ASIN):
5669 CASE_FLT_FN (BUILT_IN_ACOS):
5670 CASE_FLT_FN (BUILT_IN_ATAN):
5671 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5672 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5673 because of possible accuracy problems. */
5674 if (! flag_unsafe_math_optimizations)
5675 break;
5676 CASE_FLT_FN (BUILT_IN_SQRT):
5677 CASE_FLT_FN (BUILT_IN_FLOOR):
5678 CASE_FLT_FN (BUILT_IN_CEIL):
5679 CASE_FLT_FN (BUILT_IN_TRUNC):
5680 CASE_FLT_FN (BUILT_IN_ROUND):
5681 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5682 CASE_FLT_FN (BUILT_IN_RINT):
5683 target = expand_builtin_mathfn (exp, target, subtarget);
5684 if (target)
5685 return target;
5686 break;
5688 CASE_FLT_FN (BUILT_IN_ILOGB):
5689 if (! flag_unsafe_math_optimizations)
5690 break;
5691 CASE_FLT_FN (BUILT_IN_ISINF):
5692 CASE_FLT_FN (BUILT_IN_FINITE):
5693 case BUILT_IN_ISFINITE:
5694 case BUILT_IN_ISNORMAL:
5695 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
5696 if (target)
5697 return target;
5698 break;
5700 CASE_FLT_FN (BUILT_IN_LCEIL):
5701 CASE_FLT_FN (BUILT_IN_LLCEIL):
5702 CASE_FLT_FN (BUILT_IN_LFLOOR):
5703 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5704 target = expand_builtin_int_roundingfn (exp, target);
5705 if (target)
5706 return target;
5707 break;
5709 CASE_FLT_FN (BUILT_IN_LRINT):
5710 CASE_FLT_FN (BUILT_IN_LLRINT):
5711 CASE_FLT_FN (BUILT_IN_LROUND):
5712 CASE_FLT_FN (BUILT_IN_LLROUND):
5713 target = expand_builtin_int_roundingfn_2 (exp, target);
5714 if (target)
5715 return target;
5716 break;
5718 CASE_FLT_FN (BUILT_IN_POW):
5719 target = expand_builtin_pow (exp, target, subtarget);
5720 if (target)
5721 return target;
5722 break;
5724 CASE_FLT_FN (BUILT_IN_POWI):
5725 target = expand_builtin_powi (exp, target, subtarget);
5726 if (target)
5727 return target;
5728 break;
5730 CASE_FLT_FN (BUILT_IN_ATAN2):
5731 CASE_FLT_FN (BUILT_IN_LDEXP):
5732 CASE_FLT_FN (BUILT_IN_SCALB):
5733 CASE_FLT_FN (BUILT_IN_SCALBN):
5734 CASE_FLT_FN (BUILT_IN_SCALBLN):
5735 if (! flag_unsafe_math_optimizations)
5736 break;
5738 CASE_FLT_FN (BUILT_IN_FMOD):
5739 CASE_FLT_FN (BUILT_IN_REMAINDER):
5740 CASE_FLT_FN (BUILT_IN_DREM):
5741 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5742 if (target)
5743 return target;
5744 break;
5746 CASE_FLT_FN (BUILT_IN_CEXPI):
5747 target = expand_builtin_cexpi (exp, target, subtarget);
5748 gcc_assert (target);
5749 return target;
5751 CASE_FLT_FN (BUILT_IN_SIN):
5752 CASE_FLT_FN (BUILT_IN_COS):
5753 if (! flag_unsafe_math_optimizations)
5754 break;
5755 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5756 if (target)
5757 return target;
5758 break;
5760 CASE_FLT_FN (BUILT_IN_SINCOS):
5761 if (! flag_unsafe_math_optimizations)
5762 break;
5763 target = expand_builtin_sincos (exp);
5764 if (target)
5765 return target;
5766 break;
5768 case BUILT_IN_APPLY_ARGS:
5769 return expand_builtin_apply_args ();
5771 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5772 FUNCTION with a copy of the parameters described by
5773 ARGUMENTS, and ARGSIZE. It returns a block of memory
5774 allocated on the stack into which is stored all the registers
5775 that might possibly be used for returning the result of a
5776 function. ARGUMENTS is the value returned by
5777 __builtin_apply_args. ARGSIZE is the number of bytes of
5778 arguments that must be copied. ??? How should this value be
5779 computed? We'll also need a safe worst case value for varargs
5780 functions. */
5781 case BUILT_IN_APPLY:
5782 if (!validate_arglist (exp, POINTER_TYPE,
5783 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5784 && !validate_arglist (exp, REFERENCE_TYPE,
5785 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5786 return const0_rtx;
5787 else
5789 rtx ops[3];
5791 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5792 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5793 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5795 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5798 /* __builtin_return (RESULT) causes the function to return the
5799 value described by RESULT. RESULT is address of the block of
5800 memory returned by __builtin_apply. */
5801 case BUILT_IN_RETURN:
5802 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5803 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5804 return const0_rtx;
5806 case BUILT_IN_SAVEREGS:
5807 return expand_builtin_saveregs ();
5809 case BUILT_IN_ARGS_INFO:
5810 return expand_builtin_args_info (exp);
5812 case BUILT_IN_VA_ARG_PACK:
5813 /* All valid uses of __builtin_va_arg_pack () are removed during
5814 inlining. */
5815 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5816 return const0_rtx;
5818 case BUILT_IN_VA_ARG_PACK_LEN:
5819 /* All valid uses of __builtin_va_arg_pack_len () are removed during
5820 inlining. */
5821 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5822 return const0_rtx;
5824 /* Return the address of the first anonymous stack arg. */
5825 case BUILT_IN_NEXT_ARG:
5826 if (fold_builtin_next_arg (exp, false))
5827 return const0_rtx;
5828 return expand_builtin_next_arg ();
5830 case BUILT_IN_CLEAR_CACHE:
5831 target = expand_builtin___clear_cache (exp);
5832 if (target)
5833 return target;
5834 break;
5836 case BUILT_IN_CLASSIFY_TYPE:
5837 return expand_builtin_classify_type (exp);
5839 case BUILT_IN_CONSTANT_P:
5840 return const0_rtx;
5842 case BUILT_IN_FRAME_ADDRESS:
5843 case BUILT_IN_RETURN_ADDRESS:
5844 return expand_builtin_frame_address (fndecl, exp);
5846 /* Returns the address of the area where the structure is returned.
5847 0 otherwise. */
5848 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5849 if (call_expr_nargs (exp) != 0
5850 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5851 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5852 return const0_rtx;
5853 else
5854 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5856 case BUILT_IN_ALLOCA:
5857 target = expand_builtin_alloca (exp, target);
5858 if (target)
5859 return target;
5860 break;
5862 case BUILT_IN_STACK_SAVE:
5863 return expand_stack_save ();
5865 case BUILT_IN_STACK_RESTORE:
5866 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5867 return const0_rtx;
5869 case BUILT_IN_BSWAP32:
5870 case BUILT_IN_BSWAP64:
5871 target = expand_builtin_bswap (exp, target, subtarget);
5873 if (target)
5874 return target;
5875 break;
5877 CASE_INT_FN (BUILT_IN_FFS):
5878 case BUILT_IN_FFSIMAX:
5879 target = expand_builtin_unop (target_mode, exp, target,
5880 subtarget, ffs_optab);
5881 if (target)
5882 return target;
5883 break;
5885 CASE_INT_FN (BUILT_IN_CLZ):
5886 case BUILT_IN_CLZIMAX:
5887 target = expand_builtin_unop (target_mode, exp, target,
5888 subtarget, clz_optab);
5889 if (target)
5890 return target;
5891 break;
5893 CASE_INT_FN (BUILT_IN_CTZ):
5894 case BUILT_IN_CTZIMAX:
5895 target = expand_builtin_unop (target_mode, exp, target,
5896 subtarget, ctz_optab);
5897 if (target)
5898 return target;
5899 break;
5901 CASE_INT_FN (BUILT_IN_POPCOUNT):
5902 case BUILT_IN_POPCOUNTIMAX:
5903 target = expand_builtin_unop (target_mode, exp, target,
5904 subtarget, popcount_optab);
5905 if (target)
5906 return target;
5907 break;
5909 CASE_INT_FN (BUILT_IN_PARITY):
5910 case BUILT_IN_PARITYIMAX:
5911 target = expand_builtin_unop (target_mode, exp, target,
5912 subtarget, parity_optab);
5913 if (target)
5914 return target;
5915 break;
5917 case BUILT_IN_STRLEN:
5918 target = expand_builtin_strlen (exp, target, target_mode);
5919 if (target)
5920 return target;
5921 break;
5923 case BUILT_IN_STRCPY:
5924 target = expand_builtin_strcpy (exp, target);
5925 if (target)
5926 return target;
5927 break;
5929 case BUILT_IN_STRNCPY:
5930 target = expand_builtin_strncpy (exp, target);
5931 if (target)
5932 return target;
5933 break;
5935 case BUILT_IN_STPCPY:
5936 target = expand_builtin_stpcpy (exp, target, mode);
5937 if (target)
5938 return target;
5939 break;
5941 case BUILT_IN_MEMCPY:
5942 target = expand_builtin_memcpy (exp, target);
5943 if (target)
5944 return target;
5945 break;
5947 case BUILT_IN_MEMPCPY:
5948 target = expand_builtin_mempcpy (exp, target, mode);
5949 if (target)
5950 return target;
5951 break;
5953 case BUILT_IN_MEMSET:
5954 target = expand_builtin_memset (exp, target, mode);
5955 if (target)
5956 return target;
5957 break;
5959 case BUILT_IN_BZERO:
5960 target = expand_builtin_bzero (exp);
5961 if (target)
5962 return target;
5963 break;
5965 case BUILT_IN_STRCMP:
5966 target = expand_builtin_strcmp (exp, target);
5967 if (target)
5968 return target;
5969 break;
5971 case BUILT_IN_STRNCMP:
5972 target = expand_builtin_strncmp (exp, target, mode);
5973 if (target)
5974 return target;
5975 break;
5977 case BUILT_IN_BCMP:
5978 case BUILT_IN_MEMCMP:
5979 target = expand_builtin_memcmp (exp, target, mode);
5980 if (target)
5981 return target;
5982 break;
5984 case BUILT_IN_SETJMP:
5985 /* This should have been lowered to the builtins below. */
5986 gcc_unreachable ();
5988 case BUILT_IN_SETJMP_SETUP:
5989 /* __builtin_setjmp_setup is passed a pointer to an array of five words
5990 and the receiver label. */
5991 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5993 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5994 VOIDmode, EXPAND_NORMAL);
5995 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
5996 rtx label_r = label_rtx (label);
5998 /* This is copied from the handling of non-local gotos. */
5999 expand_builtin_setjmp_setup (buf_addr, label_r);
6000 nonlocal_goto_handler_labels
6001 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6002 nonlocal_goto_handler_labels);
6003 /* ??? Do not let expand_label treat us as such since we would
6004 not want to be both on the list of non-local labels and on
6005 the list of forced labels. */
6006 FORCED_LABEL (label) = 0;
6007 return const0_rtx;
6009 break;
6011 case BUILT_IN_SETJMP_DISPATCHER:
6012 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6013 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6015 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6016 rtx label_r = label_rtx (label);
6018 /* Remove the dispatcher label from the list of non-local labels
6019 since the receiver labels have been added to it above. */
6020 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6021 return const0_rtx;
6023 break;
6025 case BUILT_IN_SETJMP_RECEIVER:
6026 /* __builtin_setjmp_receiver is passed the receiver label. */
6027 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6029 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6030 rtx label_r = label_rtx (label);
6032 expand_builtin_setjmp_receiver (label_r);
6033 return const0_rtx;
6035 break;
6037 /* __builtin_longjmp is passed a pointer to an array of five words.
6038 It's similar to the C library longjmp function but works with
6039 __builtin_setjmp above. */
6040 case BUILT_IN_LONGJMP:
6041 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6043 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6044 VOIDmode, EXPAND_NORMAL);
6045 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6047 if (value != const1_rtx)
6049 error ("%<__builtin_longjmp%> second argument must be 1");
6050 return const0_rtx;
6053 expand_builtin_longjmp (buf_addr, value);
6054 return const0_rtx;
6056 break;
6058 case BUILT_IN_NONLOCAL_GOTO:
6059 target = expand_builtin_nonlocal_goto (exp);
6060 if (target)
6061 return target;
6062 break;
6064 /* This updates the setjmp buffer that is its argument with the value
6065 of the current stack pointer. */
6066 case BUILT_IN_UPDATE_SETJMP_BUF:
6067 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6069 rtx buf_addr
6070 = expand_normal (CALL_EXPR_ARG (exp, 0));
6072 expand_builtin_update_setjmp_buf (buf_addr);
6073 return const0_rtx;
6075 break;
6077 case BUILT_IN_TRAP:
6078 expand_builtin_trap ();
6079 return const0_rtx;
6081 case BUILT_IN_UNREACHABLE:
6082 expand_builtin_unreachable ();
6083 return const0_rtx;
6085 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6086 case BUILT_IN_SIGNBITD32:
6087 case BUILT_IN_SIGNBITD64:
6088 case BUILT_IN_SIGNBITD128:
6089 target = expand_builtin_signbit (exp, target);
6090 if (target)
6091 return target;
6092 break;
6094 /* Various hooks for the DWARF 2 __throw routine. */
6095 case BUILT_IN_UNWIND_INIT:
6096 expand_builtin_unwind_init ();
6097 return const0_rtx;
6098 case BUILT_IN_DWARF_CFA:
6099 return virtual_cfa_rtx;
6100 #ifdef DWARF2_UNWIND_INFO
6101 case BUILT_IN_DWARF_SP_COLUMN:
6102 return expand_builtin_dwarf_sp_column ();
6103 case BUILT_IN_INIT_DWARF_REG_SIZES:
6104 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6105 return const0_rtx;
6106 #endif
6107 case BUILT_IN_FROB_RETURN_ADDR:
6108 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6109 case BUILT_IN_EXTRACT_RETURN_ADDR:
6110 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6111 case BUILT_IN_EH_RETURN:
6112 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6113 CALL_EXPR_ARG (exp, 1));
6114 return const0_rtx;
6115 #ifdef EH_RETURN_DATA_REGNO
6116 case BUILT_IN_EH_RETURN_DATA_REGNO:
6117 return expand_builtin_eh_return_data_regno (exp);
6118 #endif
6119 case BUILT_IN_EXTEND_POINTER:
6120 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6121 case BUILT_IN_EH_POINTER:
6122 return expand_builtin_eh_pointer (exp);
6123 case BUILT_IN_EH_FILTER:
6124 return expand_builtin_eh_filter (exp);
6125 case BUILT_IN_EH_COPY_VALUES:
6126 return expand_builtin_eh_copy_values (exp);
6128 case BUILT_IN_VA_START:
6129 return expand_builtin_va_start (exp);
6130 case BUILT_IN_VA_END:
6131 return expand_builtin_va_end (exp);
6132 case BUILT_IN_VA_COPY:
6133 return expand_builtin_va_copy (exp);
6134 case BUILT_IN_EXPECT:
6135 return expand_builtin_expect (exp, target);
6136 case BUILT_IN_PREFETCH:
6137 expand_builtin_prefetch (exp);
6138 return const0_rtx;
6140 case BUILT_IN_PROFILE_FUNC_ENTER:
6141 return expand_builtin_profile_func (false);
6142 case BUILT_IN_PROFILE_FUNC_EXIT:
6143 return expand_builtin_profile_func (true);
6145 case BUILT_IN_INIT_TRAMPOLINE:
6146 return expand_builtin_init_trampoline (exp);
6147 case BUILT_IN_ADJUST_TRAMPOLINE:
6148 return expand_builtin_adjust_trampoline (exp);
6150 case BUILT_IN_FORK:
6151 case BUILT_IN_EXECL:
6152 case BUILT_IN_EXECV:
6153 case BUILT_IN_EXECLP:
6154 case BUILT_IN_EXECLE:
6155 case BUILT_IN_EXECVP:
6156 case BUILT_IN_EXECVE:
6157 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6158 if (target)
6159 return target;
6160 break;
6162 case BUILT_IN_FETCH_AND_ADD_1:
6163 case BUILT_IN_FETCH_AND_ADD_2:
6164 case BUILT_IN_FETCH_AND_ADD_4:
6165 case BUILT_IN_FETCH_AND_ADD_8:
6166 case BUILT_IN_FETCH_AND_ADD_16:
6167 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6168 target = expand_builtin_sync_operation (mode, exp, PLUS,
6169 false, target, ignore);
6170 if (target)
6171 return target;
6172 break;
6174 case BUILT_IN_FETCH_AND_SUB_1:
6175 case BUILT_IN_FETCH_AND_SUB_2:
6176 case BUILT_IN_FETCH_AND_SUB_4:
6177 case BUILT_IN_FETCH_AND_SUB_8:
6178 case BUILT_IN_FETCH_AND_SUB_16:
6179 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6180 target = expand_builtin_sync_operation (mode, exp, MINUS,
6181 false, target, ignore);
6182 if (target)
6183 return target;
6184 break;
6186 case BUILT_IN_FETCH_AND_OR_1:
6187 case BUILT_IN_FETCH_AND_OR_2:
6188 case BUILT_IN_FETCH_AND_OR_4:
6189 case BUILT_IN_FETCH_AND_OR_8:
6190 case BUILT_IN_FETCH_AND_OR_16:
6191 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6192 target = expand_builtin_sync_operation (mode, exp, IOR,
6193 false, target, ignore);
6194 if (target)
6195 return target;
6196 break;
6198 case BUILT_IN_FETCH_AND_AND_1:
6199 case BUILT_IN_FETCH_AND_AND_2:
6200 case BUILT_IN_FETCH_AND_AND_4:
6201 case BUILT_IN_FETCH_AND_AND_8:
6202 case BUILT_IN_FETCH_AND_AND_16:
6203 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6204 target = expand_builtin_sync_operation (mode, exp, AND,
6205 false, target, ignore);
6206 if (target)
6207 return target;
6208 break;
6210 case BUILT_IN_FETCH_AND_XOR_1:
6211 case BUILT_IN_FETCH_AND_XOR_2:
6212 case BUILT_IN_FETCH_AND_XOR_4:
6213 case BUILT_IN_FETCH_AND_XOR_8:
6214 case BUILT_IN_FETCH_AND_XOR_16:
6215 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6216 target = expand_builtin_sync_operation (mode, exp, XOR,
6217 false, target, ignore);
6218 if (target)
6219 return target;
6220 break;
6222 case BUILT_IN_FETCH_AND_NAND_1:
6223 case BUILT_IN_FETCH_AND_NAND_2:
6224 case BUILT_IN_FETCH_AND_NAND_4:
6225 case BUILT_IN_FETCH_AND_NAND_8:
6226 case BUILT_IN_FETCH_AND_NAND_16:
6227 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6228 target = expand_builtin_sync_operation (mode, exp, NOT,
6229 false, target, ignore);
6230 if (target)
6231 return target;
6232 break;
6234 case BUILT_IN_ADD_AND_FETCH_1:
6235 case BUILT_IN_ADD_AND_FETCH_2:
6236 case BUILT_IN_ADD_AND_FETCH_4:
6237 case BUILT_IN_ADD_AND_FETCH_8:
6238 case BUILT_IN_ADD_AND_FETCH_16:
6239 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6240 target = expand_builtin_sync_operation (mode, exp, PLUS,
6241 true, target, ignore);
6242 if (target)
6243 return target;
6244 break;
6246 case BUILT_IN_SUB_AND_FETCH_1:
6247 case BUILT_IN_SUB_AND_FETCH_2:
6248 case BUILT_IN_SUB_AND_FETCH_4:
6249 case BUILT_IN_SUB_AND_FETCH_8:
6250 case BUILT_IN_SUB_AND_FETCH_16:
6251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6252 target = expand_builtin_sync_operation (mode, exp, MINUS,
6253 true, target, ignore);
6254 if (target)
6255 return target;
6256 break;
6258 case BUILT_IN_OR_AND_FETCH_1:
6259 case BUILT_IN_OR_AND_FETCH_2:
6260 case BUILT_IN_OR_AND_FETCH_4:
6261 case BUILT_IN_OR_AND_FETCH_8:
6262 case BUILT_IN_OR_AND_FETCH_16:
6263 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6264 target = expand_builtin_sync_operation (mode, exp, IOR,
6265 true, target, ignore);
6266 if (target)
6267 return target;
6268 break;
6270 case BUILT_IN_AND_AND_FETCH_1:
6271 case BUILT_IN_AND_AND_FETCH_2:
6272 case BUILT_IN_AND_AND_FETCH_4:
6273 case BUILT_IN_AND_AND_FETCH_8:
6274 case BUILT_IN_AND_AND_FETCH_16:
6275 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6276 target = expand_builtin_sync_operation (mode, exp, AND,
6277 true, target, ignore);
6278 if (target)
6279 return target;
6280 break;
6282 case BUILT_IN_XOR_AND_FETCH_1:
6283 case BUILT_IN_XOR_AND_FETCH_2:
6284 case BUILT_IN_XOR_AND_FETCH_4:
6285 case BUILT_IN_XOR_AND_FETCH_8:
6286 case BUILT_IN_XOR_AND_FETCH_16:
6287 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6288 target = expand_builtin_sync_operation (mode, exp, XOR,
6289 true, target, ignore);
6290 if (target)
6291 return target;
6292 break;
6294 case BUILT_IN_NAND_AND_FETCH_1:
6295 case BUILT_IN_NAND_AND_FETCH_2:
6296 case BUILT_IN_NAND_AND_FETCH_4:
6297 case BUILT_IN_NAND_AND_FETCH_8:
6298 case BUILT_IN_NAND_AND_FETCH_16:
6299 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6300 target = expand_builtin_sync_operation (mode, exp, NOT,
6301 true, target, ignore);
6302 if (target)
6303 return target;
6304 break;
6306 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6307 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6308 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6309 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6310 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6311 if (mode == VOIDmode)
6312 mode = TYPE_MODE (boolean_type_node);
6313 if (!target || !register_operand (target, mode))
6314 target = gen_reg_rtx (mode);
6316 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6317 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6318 if (target)
6319 return target;
6320 break;
6322 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6323 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6324 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6325 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6326 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6327 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6328 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6329 if (target)
6330 return target;
6331 break;
6333 case BUILT_IN_LOCK_TEST_AND_SET_1:
6334 case BUILT_IN_LOCK_TEST_AND_SET_2:
6335 case BUILT_IN_LOCK_TEST_AND_SET_4:
6336 case BUILT_IN_LOCK_TEST_AND_SET_8:
6337 case BUILT_IN_LOCK_TEST_AND_SET_16:
6338 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6339 target = expand_builtin_lock_test_and_set (mode, exp, target);
6340 if (target)
6341 return target;
6342 break;
6344 case BUILT_IN_LOCK_RELEASE_1:
6345 case BUILT_IN_LOCK_RELEASE_2:
6346 case BUILT_IN_LOCK_RELEASE_4:
6347 case BUILT_IN_LOCK_RELEASE_8:
6348 case BUILT_IN_LOCK_RELEASE_16:
6349 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6350 expand_builtin_lock_release (mode, exp);
6351 return const0_rtx;
6353 case BUILT_IN_SYNCHRONIZE:
6354 expand_builtin_synchronize ();
6355 return const0_rtx;
6357 case BUILT_IN_OBJECT_SIZE:
6358 return expand_builtin_object_size (exp);
6360 case BUILT_IN_MEMCPY_CHK:
6361 case BUILT_IN_MEMPCPY_CHK:
6362 case BUILT_IN_MEMMOVE_CHK:
6363 case BUILT_IN_MEMSET_CHK:
6364 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6365 if (target)
6366 return target;
6367 break;
6369 case BUILT_IN_STRCPY_CHK:
6370 case BUILT_IN_STPCPY_CHK:
6371 case BUILT_IN_STRNCPY_CHK:
6372 case BUILT_IN_STRCAT_CHK:
6373 case BUILT_IN_STRNCAT_CHK:
6374 case BUILT_IN_SNPRINTF_CHK:
6375 case BUILT_IN_VSNPRINTF_CHK:
6376 maybe_emit_chk_warning (exp, fcode);
6377 break;
6379 case BUILT_IN_SPRINTF_CHK:
6380 case BUILT_IN_VSPRINTF_CHK:
6381 maybe_emit_sprintf_chk_warning (exp, fcode);
6382 break;
6384 case BUILT_IN_FREE:
6385 maybe_emit_free_warning (exp);
6386 break;
6388 default: /* just do library call, if unknown builtin */
6389 break;
6392 /* The switch statement above can drop through to cause the function
6393 to be called normally. */
6394 return expand_call (exp, target, ignore);
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  /* Only direct calls (address of a FUNCTION_DECL) can be builtins.  */
  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* Machine-specific (BUILT_IN_MD) builtins are excluded; only
     language-independent builtins are classified here.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter types and the actual arguments in
     lock-step, checking that each argument's type class matches the
     corresponding parameter's class.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Trailing void: the call must have no arguments left.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few arguments for the declared parameter list.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      /* Classes must agree: scalar float, complex float, pointer or
	 integral.  Anything else disqualifies the call.  */
      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Parameter list exhausted without a trailing void: the builtin
     takes a variable-length argument list; accept it as-is.  */
  return DECL_FUNCTION_CODE (fndecl);
}
6469 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6470 evaluate to a constant. */
6472 static tree
6473 fold_builtin_constant_p (tree arg)
6475 /* We return 1 for a numeric type that's known to be a constant
6476 value at compile-time or for an aggregate type that's a
6477 literal constant. */
6478 STRIP_NOPS (arg);
6480 /* If we know this is a constant, emit the constant of one. */
6481 if (CONSTANT_CLASS_P (arg)
6482 || (TREE_CODE (arg) == CONSTRUCTOR
6483 && TREE_CONSTANT (arg)))
6484 return integer_one_node;
6485 if (TREE_CODE (arg) == ADDR_EXPR)
6487 tree op = TREE_OPERAND (arg, 0);
6488 if (TREE_CODE (op) == STRING_CST
6489 || (TREE_CODE (op) == ARRAY_REF
6490 && integer_zerop (TREE_OPERAND (op, 1))
6491 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6492 return integer_one_node;
6495 /* If this expression has side effects, show we don't know it to be a
6496 constant. Likewise if it's a pointer or aggregate type since in
6497 those case we only want literals, since those are only optimized
6498 when generating RTL, not later.
6499 And finally, if we are compiling an initializer, not code, we
6500 need to return a definite result now; there's not going to be any
6501 more optimization done. */
6502 if (TREE_SIDE_EFFECTS (arg)
6503 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6504 || POINTER_TYPE_P (TREE_TYPE (arg))
6505 || cfun == 0
6506 || folding_initializer)
6507 return integer_zero_node;
6509 return NULL_TREE;
6512 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6513 return it as a truthvalue. */
6515 static tree
6516 build_builtin_expect_predicate (location_t loc, tree pred, tree expected)
6518 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6520 fn = built_in_decls[BUILT_IN_EXPECT];
6521 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6522 ret_type = TREE_TYPE (TREE_TYPE (fn));
6523 pred_type = TREE_VALUE (arg_types);
6524 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6526 pred = fold_convert_loc (loc, pred_type, pred);
6527 expected = fold_convert_loc (loc, expected_type, expected);
6528 call_expr = build_call_expr_loc (loc, fn, 2, pred, expected);
6530 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6531 build_int_cst (ret_type, 0));
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1)
{
  tree inner, fndecl;
  enum tree_code code;

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a thruthvalue.  */
  inner = arg0;
  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* A nested __builtin_expect call: the outer call adds nothing, so
     just return ARG0 unchanged.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner = arg0;
  while (TREE_CODE (inner) == NOP_EXPR
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
    inner = TREE_OPERAND (inner, 0);

  /* For a && b or a || b, push the expectation onto each operand:
     expect (a && b, v) -> expect (a, v) && expect (b, v).  */
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1);
      op1 = build_builtin_expect_predicate (loc, op1, arg1);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip field and array accesses down to the underlying decl.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* Weak symbols may resolve to a null address at link time, so
	 their address is not a usable compile-time constant.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
6605 /* Fold a call to __builtin_classify_type with argument ARG. */
6607 static tree
6608 fold_builtin_classify_type (tree arg)
6610 if (arg == 0)
6611 return build_int_cst (NULL_TREE, no_type_class);
6613 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
6616 /* Fold a call to __builtin_strlen with argument ARG. */
6618 static tree
6619 fold_builtin_strlen (location_t loc, tree arg)
6621 if (!validate_arg (arg, POINTER_TYPE))
6622 return NULL_TREE;
6623 else
6625 tree len = c_strlen (arg, 0);
6627 if (len)
6629 /* Convert from the internal "sizetype" type to "size_t". */
6630 if (size_type_node)
6631 len = fold_convert_loc (loc, size_type_node, len);
6632 return len;
6635 return NULL_TREE;
6639 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6641 static tree
6642 fold_builtin_inf (location_t loc, tree type, int warn)
6644 REAL_VALUE_TYPE real;
6646 /* __builtin_inff is intended to be usable to define INFINITY on all
6647 targets. If an infinity is not available, INFINITY expands "to a
6648 positive constant of type float that overflows at translation
6649 time", footnote "In this case, using INFINITY will violate the
6650 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6651 Thus we pedwarn to ensure this constraint violation is
6652 diagnosed. */
6653 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
6654 pedwarn (loc, 0, "target format does not support infinity");
6656 real_inf (&real);
6657 return build_real (type, real);
6660 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
6662 static tree
6663 fold_builtin_nan (tree arg, tree type, int quiet)
6665 REAL_VALUE_TYPE real;
6666 const char *str;
6668 if (!validate_arg (arg, POINTER_TYPE))
6669 return NULL_TREE;
6670 str = c_getstr (arg);
6671 if (!str)
6672 return NULL_TREE;
6674 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
6675 return NULL_TREE;
6677 return build_real (type, real);
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    /* A conversion from an integer is integer-valued by construction.  */
    case FLOAT_EXPR:
      return true;

    /* Unary wrappers preserve integer-valuedness of the operand.  */
    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    /* The value of these is their second operand.  */
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Closed under these arithmetic operations when both operands
       are integer-valued.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    /* Both selectable arms must be integer-valued.  */
    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case NOP_EXPR:
      {
	/* int -> float conversions are integer-valued; float -> float
	   conversions are if the source already was.  */
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding builtins always produce integral values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax return one of their operands.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  /* Conservative default: not known to be integer-valued.  */
  return false;
}
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent: f(f(x)) == f(x).  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow the call when ARG is a widened narrower float: e.g.
	 floor((double)f) -> (double)floorf(f), provided a builtin
	 exists for the narrower type.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding: a plain
     float->int truncation gives the same result.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			    TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the FP argument type when ARG is a widened narrower
	 float and a matching narrower builtin exists.  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				    fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* The long variant returns long; convert back to the
	     original long long return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  /* The argument must be a complex type with a real-float component.  */
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant:
     cabs(a+bi) == hypot(a,b).  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  Only valid under
	 -funsafe-math-optimizations since it changes rounding.  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
				  fold_build1_loc (loc, ABS_EXPR, type, real),
				  build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Expand cabs(z) as sqrt(re*re + im*im).  Don't do this when
     optimizing for size, since it emits more code than the call.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG and both parts so they are evaluated only once.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				    fold_build2_loc (loc, MULT_EXPR, type,
						     rpart, rpart),
				    fold_build2_loc (loc, MULT_EXPR, type,
						     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant (the &dconst0
     lower bound rejects negative constants, which would yield NaN).  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			     CALL_EXPR_ARG (arg, 0),
			     build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent by
	     decrementing the binary exponent of the constant.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* |x| keeps the base nonnegative so pow stays well-defined.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			       build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 to get 1/6 by decrementing the binary
		 exponent.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 == (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }

  return NULL_TREE;
}
7104 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7105 TYPE is the type of the return value. Return NULL_TREE if no
7106 simplification can be made. */
7108 static tree
7109 fold_builtin_cos (location_t loc,
7110 tree arg, tree type, tree fndecl)
7112 tree res, narg;
7114 if (!validate_arg (arg, REAL_TYPE))
7115 return NULL_TREE;
7117 /* Calculate the result when the argument is a constant. */
7118 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7119 return res;
7121 /* Optimize cos(-x) into cos (x). */
7122 if ((narg = fold_strip_sign_ops (arg)))
7123 return build_call_expr_loc (loc, fndecl, 1, narg);
7125 return NULL_TREE;
7128 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7129 Return NULL_TREE if no simplification can be made. */
7131 static tree
7132 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7134 if (validate_arg (arg, REAL_TYPE))
7136 tree res, narg;
7138 /* Calculate the result when the argument is a constant. */
7139 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7140 return res;
7142 /* Optimize cosh(-x) into cosh (x). */
7143 if ((narg = fold_strip_sign_ops (arg)))
7144 return build_call_expr_loc (loc, fndecl, 1, narg);
7147 return NULL_TREE;
7150 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7151 argument ARG. TYPE is the type of the return value. Return
7152 NULL_TREE if no simplification can be made. */
7154 static tree
7155 fold_builtin_ccos (location_t loc,
7156 tree arg, tree type ATTRIBUTE_UNUSED, tree fndecl,
7157 bool hyper ATTRIBUTE_UNUSED)
7159 if (validate_arg (arg, COMPLEX_TYPE)
7160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7162 tree tmp;
7164 #ifdef HAVE_mpc
7165 /* Calculate the result when the argument is a constant. */
7166 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7167 return tmp;
7168 #endif
7170 /* Optimize fn(-x) into fn(x). */
7171 if ((tmp = fold_strip_sign_ops (arg)))
7172 return build_call_expr_loc (loc, fndecl, 1, tmp);
7175 return NULL_TREE;
7178 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7179 Return NULL_TREE if no simplification can be made. */
7181 static tree
7182 fold_builtin_tan (tree arg, tree type)
7184 enum built_in_function fcode;
7185 tree res;
7187 if (!validate_arg (arg, REAL_TYPE))
7188 return NULL_TREE;
7190 /* Calculate the result when the argument is a constant. */
7191 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7192 return res;
7194 /* Optimize tan(atan(x)) = x. */
7195 fcode = builtin_mathfn_code (arg);
7196 if (flag_unsafe_math_optimizations
7197 && (fcode == BUILT_IN_ATAN
7198 || fcode == BUILT_IN_ATANF
7199 || fcode == BUILT_IN_ATANL))
7200 return CALL_EXPR_ARG (arg, 0);
7202 return NULL_TREE;
7205 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7206 NULL_TREE if no simplification can be made. */
7208 static tree
7209 fold_builtin_sincos (location_t loc,
7210 tree arg0, tree arg1, tree arg2)
7212 tree type;
7213 tree res, fn, call;
7215 if (!validate_arg (arg0, REAL_TYPE)
7216 || !validate_arg (arg1, POINTER_TYPE)
7217 || !validate_arg (arg2, POINTER_TYPE))
7218 return NULL_TREE;
7220 type = TREE_TYPE (arg0);
7222 /* Calculate the result when the argument is a constant. */
7223 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7224 return res;
7226 /* Canonicalize sincos to cexpi. */
7227 if (!TARGET_C99_FUNCTIONS)
7228 return NULL_TREE;
7229 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7230 if (!fn)
7231 return NULL_TREE;
7233 call = build_call_expr_loc (loc, fn, 1, arg0);
7234 call = builtin_save_expr (call);
7236 return build2 (COMPOUND_EXPR, void_type_node,
7237 build2 (MODIFY_EXPR, void_type_node,
7238 build_fold_indirect_ref_loc (loc, arg1),
7239 build1 (IMAGPART_EXPR, type, call)),
7240 build2 (MODIFY_EXPR, void_type_node,
7241 build_fold_indirect_ref_loc (loc, arg2),
7242 build1 (REALPART_EXPR, type, call)));
7245 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7246 NULL_TREE if no simplification can be made. */
7248 static tree
7249 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7251 tree rtype;
7252 tree realp, imagp, ifn;
7253 #ifdef HAVE_mpc
7254 tree res;
7255 #endif
7257 if (!validate_arg (arg0, COMPLEX_TYPE)
7258 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7259 return NULL_TREE;
7261 #ifdef HAVE_mpc
7262 /* Calculate the result when the argument is a constant. */
7263 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7264 return res;
7265 #endif
7267 rtype = TREE_TYPE (TREE_TYPE (arg0));
7269 /* In case we can figure out the real part of arg0 and it is constant zero
7270 fold to cexpi. */
7271 if (!TARGET_C99_FUNCTIONS)
7272 return NULL_TREE;
7273 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7274 if (!ifn)
7275 return NULL_TREE;
7277 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7278 && real_zerop (realp))
7280 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7281 return build_call_expr_loc (loc, ifn, 1, narg);
7284 /* In case we can easily decompose real and imaginary parts split cexp
7285 to exp (r) * cexpi (i). */
7286 if (flag_unsafe_math_optimizations
7287 && realp)
7289 tree rfn, rcall, icall;
7291 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7292 if (!rfn)
7293 return NULL_TREE;
7295 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7296 if (!imagp)
7297 return NULL_TREE;
7299 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7300 icall = builtin_save_expr (icall);
7301 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7302 rcall = builtin_save_expr (rcall);
7303 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7304 fold_build2_loc (loc, MULT_EXPR, rtype,
7305 rcall,
7306 fold_build1_loc (loc, REALPART_EXPR,
7307 rtype, icall)),
7308 fold_build2_loc (loc, MULT_EXPR, rtype,
7309 rcall,
7310 fold_build1_loc (loc, IMAGPART_EXPR,
7311 rtype, icall)));
7314 return NULL_TREE;
7317 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7318 Return NULL_TREE if no simplification can be made. */
7320 static tree
7321 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7323 if (!validate_arg (arg, REAL_TYPE))
7324 return NULL_TREE;
7326 /* Optimize trunc of constant value. */
7327 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7329 REAL_VALUE_TYPE r, x;
7330 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7332 x = TREE_REAL_CST (arg);
7333 real_trunc (&r, TYPE_MODE (type), &x);
7334 return build_real (type, r);
7337 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7340 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7341 Return NULL_TREE if no simplification can be made. */
7343 static tree
7344 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7346 if (!validate_arg (arg, REAL_TYPE))
7347 return NULL_TREE;
7349 /* Optimize floor of constant value. */
7350 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7352 REAL_VALUE_TYPE x;
7354 x = TREE_REAL_CST (arg);
7355 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7357 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7358 REAL_VALUE_TYPE r;
7360 real_floor (&r, TYPE_MODE (type), &x);
7361 return build_real (type, r);
7365 /* Fold floor (x) where x is nonnegative to trunc (x). */
7366 if (tree_expr_nonnegative_p (arg))
7368 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7369 if (truncfn)
7370 return build_call_expr_loc (loc, truncfn, 1, arg);
7373 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7376 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7377 Return NULL_TREE if no simplification can be made. */
7379 static tree
7380 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7382 if (!validate_arg (arg, REAL_TYPE))
7383 return NULL_TREE;
7385 /* Optimize ceil of constant value. */
7386 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7388 REAL_VALUE_TYPE x;
7390 x = TREE_REAL_CST (arg);
7391 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7393 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7394 REAL_VALUE_TYPE r;
7396 real_ceil (&r, TYPE_MODE (type), &x);
7397 return build_real (type, r);
7401 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7404 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7405 Return NULL_TREE if no simplification can be made. */
7407 static tree
7408 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7410 if (!validate_arg (arg, REAL_TYPE))
7411 return NULL_TREE;
7413 /* Optimize round of constant value. */
7414 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7416 REAL_VALUE_TYPE x;
7418 x = TREE_REAL_CST (arg);
7419 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7421 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7422 REAL_VALUE_TYPE r;
7424 real_round (&r, TYPE_MODE (type), &x);
7425 return build_real (type, r);
7429 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7432 /* Fold function call to builtin lround, lroundf or lroundl (or the
7433 corresponding long long versions) and other rounding functions. ARG
7434 is the argument to the call. Return NULL_TREE if no simplification
7435 can be made. */
7437 static tree
7438 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
7440 if (!validate_arg (arg, REAL_TYPE))
7441 return NULL_TREE;
7443 /* Optimize lround of constant value. */
7444 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7446 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7448 if (real_isfinite (&x))
7450 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7451 tree ftype = TREE_TYPE (arg);
7452 unsigned HOST_WIDE_INT lo2;
7453 HOST_WIDE_INT hi, lo;
7454 REAL_VALUE_TYPE r;
7456 switch (DECL_FUNCTION_CODE (fndecl))
7458 CASE_FLT_FN (BUILT_IN_LFLOOR):
7459 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7460 real_floor (&r, TYPE_MODE (ftype), &x);
7461 break;
7463 CASE_FLT_FN (BUILT_IN_LCEIL):
7464 CASE_FLT_FN (BUILT_IN_LLCEIL):
7465 real_ceil (&r, TYPE_MODE (ftype), &x);
7466 break;
7468 CASE_FLT_FN (BUILT_IN_LROUND):
7469 CASE_FLT_FN (BUILT_IN_LLROUND):
7470 real_round (&r, TYPE_MODE (ftype), &x);
7471 break;
7473 default:
7474 gcc_unreachable ();
7477 REAL_VALUE_TO_INT (&lo, &hi, r);
7478 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7479 return build_int_cst_wide (itype, lo2, hi);
7483 switch (DECL_FUNCTION_CODE (fndecl))
7485 CASE_FLT_FN (BUILT_IN_LFLOOR):
7486 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7487 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7488 if (tree_expr_nonnegative_p (arg))
7489 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7490 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7491 break;
7492 default:;
7495 return fold_fixed_mathfn (loc, fndecl, arg);
7498 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7499 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7500 the argument to the call. Return NULL_TREE if no simplification can
7501 be made. */
7503 static tree
7504 fold_builtin_bitop (tree fndecl, tree arg)
7506 if (!validate_arg (arg, INTEGER_TYPE))
7507 return NULL_TREE;
7509 /* Optimize for constant argument. */
7510 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7512 HOST_WIDE_INT hi, width, result;
7513 unsigned HOST_WIDE_INT lo;
7514 tree type;
7516 type = TREE_TYPE (arg);
7517 width = TYPE_PRECISION (type);
7518 lo = TREE_INT_CST_LOW (arg);
7520 /* Clear all the bits that are beyond the type's precision. */
7521 if (width > HOST_BITS_PER_WIDE_INT)
7523 hi = TREE_INT_CST_HIGH (arg);
7524 if (width < 2 * HOST_BITS_PER_WIDE_INT)
7525 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
7527 else
7529 hi = 0;
7530 if (width < HOST_BITS_PER_WIDE_INT)
7531 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
7534 switch (DECL_FUNCTION_CODE (fndecl))
7536 CASE_INT_FN (BUILT_IN_FFS):
7537 if (lo != 0)
7538 result = exact_log2 (lo & -lo) + 1;
7539 else if (hi != 0)
7540 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
7541 else
7542 result = 0;
7543 break;
7545 CASE_INT_FN (BUILT_IN_CLZ):
7546 if (hi != 0)
7547 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
7548 else if (lo != 0)
7549 result = width - floor_log2 (lo) - 1;
7550 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7551 result = width;
7552 break;
7554 CASE_INT_FN (BUILT_IN_CTZ):
7555 if (lo != 0)
7556 result = exact_log2 (lo & -lo);
7557 else if (hi != 0)
7558 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
7559 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
7560 result = width;
7561 break;
7563 CASE_INT_FN (BUILT_IN_POPCOUNT):
7564 result = 0;
7565 while (lo)
7566 result++, lo &= lo - 1;
7567 while (hi)
7568 result++, hi &= hi - 1;
7569 break;
7571 CASE_INT_FN (BUILT_IN_PARITY):
7572 result = 0;
7573 while (lo)
7574 result++, lo &= lo - 1;
7575 while (hi)
7576 result++, hi &= hi - 1;
7577 result &= 1;
7578 break;
7580 default:
7581 gcc_unreachable ();
7584 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
7587 return NULL_TREE;
7590 /* Fold function call to builtin_bswap and the long and long long
7591 variants. Return NULL_TREE if no simplification can be made. */
7592 static tree
7593 fold_builtin_bswap (tree fndecl, tree arg)
7595 if (! validate_arg (arg, INTEGER_TYPE))
7596 return NULL_TREE;
7598 /* Optimize constant value. */
7599 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
7601 HOST_WIDE_INT hi, width, r_hi = 0;
7602 unsigned HOST_WIDE_INT lo, r_lo = 0;
7603 tree type;
7605 type = TREE_TYPE (arg);
7606 width = TYPE_PRECISION (type);
7607 lo = TREE_INT_CST_LOW (arg);
7608 hi = TREE_INT_CST_HIGH (arg);
7610 switch (DECL_FUNCTION_CODE (fndecl))
7612 case BUILT_IN_BSWAP32:
7613 case BUILT_IN_BSWAP64:
7615 int s;
7617 for (s = 0; s < width; s += 8)
7619 int d = width - s - 8;
7620 unsigned HOST_WIDE_INT byte;
7622 if (s < HOST_BITS_PER_WIDE_INT)
7623 byte = (lo >> s) & 0xff;
7624 else
7625 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
7627 if (d < HOST_BITS_PER_WIDE_INT)
7628 r_lo |= byte << d;
7629 else
7630 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
7634 break;
7636 default:
7637 gcc_unreachable ();
7640 if (width < HOST_BITS_PER_WIDE_INT)
7641 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
7642 else
7643 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
7646 return NULL_TREE;
7649 /* A subroutine of fold_builtin to fold the various logarithmic
7650 functions. Return NULL_TREE if no simplification can me made.
7651 FUNC is the corresponding MPFR logarithm function. */
7653 static tree
7654 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
7655 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7657 if (validate_arg (arg, REAL_TYPE))
7659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7660 tree res;
7661 const enum built_in_function fcode = builtin_mathfn_code (arg);
7663 /* Calculate the result when the argument is a constant. */
7664 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
7665 return res;
7667 /* Special case, optimize logN(expN(x)) = x. */
7668 if (flag_unsafe_math_optimizations
7669 && ((func == mpfr_log
7670 && (fcode == BUILT_IN_EXP
7671 || fcode == BUILT_IN_EXPF
7672 || fcode == BUILT_IN_EXPL))
7673 || (func == mpfr_log2
7674 && (fcode == BUILT_IN_EXP2
7675 || fcode == BUILT_IN_EXP2F
7676 || fcode == BUILT_IN_EXP2L))
7677 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
7678 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
7680 /* Optimize logN(func()) for various exponential functions. We
7681 want to determine the value "x" and the power "exponent" in
7682 order to transform logN(x**exponent) into exponent*logN(x). */
7683 if (flag_unsafe_math_optimizations)
7685 tree exponent = 0, x = 0;
7687 switch (fcode)
7689 CASE_FLT_FN (BUILT_IN_EXP):
7690 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7691 x = build_real (type, real_value_truncate (TYPE_MODE (type),
7692 dconst_e ()));
7693 exponent = CALL_EXPR_ARG (arg, 0);
7694 break;
7695 CASE_FLT_FN (BUILT_IN_EXP2):
7696 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7697 x = build_real (type, dconst2);
7698 exponent = CALL_EXPR_ARG (arg, 0);
7699 break;
7700 CASE_FLT_FN (BUILT_IN_EXP10):
7701 CASE_FLT_FN (BUILT_IN_POW10):
7702 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7704 REAL_VALUE_TYPE dconst10;
7705 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
7706 x = build_real (type, dconst10);
7708 exponent = CALL_EXPR_ARG (arg, 0);
7709 break;
7710 CASE_FLT_FN (BUILT_IN_SQRT):
7711 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7712 x = CALL_EXPR_ARG (arg, 0);
7713 exponent = build_real (type, dconsthalf);
7714 break;
7715 CASE_FLT_FN (BUILT_IN_CBRT):
7716 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7717 x = CALL_EXPR_ARG (arg, 0);
7718 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
7719 dconst_third ()));
7720 break;
7721 CASE_FLT_FN (BUILT_IN_POW):
7722 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7723 x = CALL_EXPR_ARG (arg, 0);
7724 exponent = CALL_EXPR_ARG (arg, 1);
7725 break;
7726 default:
7727 break;
7730 /* Now perform the optimization. */
7731 if (x && exponent)
7733 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
7734 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
7739 return NULL_TREE;
7742 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7743 NULL_TREE if no simplification can be made. */
7745 static tree
7746 fold_builtin_hypot (location_t loc, tree fndecl,
7747 tree arg0, tree arg1, tree type)
7749 tree res, narg0, narg1;
7751 if (!validate_arg (arg0, REAL_TYPE)
7752 || !validate_arg (arg1, REAL_TYPE))
7753 return NULL_TREE;
7755 /* Calculate the result when the argument is a constant. */
7756 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7757 return res;
7759 /* If either argument to hypot has a negate or abs, strip that off.
7760 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7761 narg0 = fold_strip_sign_ops (arg0);
7762 narg1 = fold_strip_sign_ops (arg1);
7763 if (narg0 || narg1)
7765 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
7766 narg1 ? narg1 : arg1);
7769 /* If either argument is zero, hypot is fabs of the other. */
7770 if (real_zerop (arg0))
7771 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
7772 else if (real_zerop (arg1))
7773 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
7775 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7776 if (flag_unsafe_math_optimizations
7777 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
7779 const REAL_VALUE_TYPE sqrt2_trunc
7780 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7781 return fold_build2_loc (loc, MULT_EXPR, type,
7782 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7783 build_real (type, sqrt2_trunc));
7786 return NULL_TREE;
7790 /* Fold a builtin function call to pow, powf, or powl. Return
7791 NULL_TREE if no simplification can be made. */
7792 static tree
7793 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
7795 tree res;
7797 if (!validate_arg (arg0, REAL_TYPE)
7798 || !validate_arg (arg1, REAL_TYPE))
7799 return NULL_TREE;
7801 /* Calculate the result when the argument is a constant. */
7802 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7803 return res;
7805 /* Optimize pow(1.0,y) = 1.0. */
7806 if (real_onep (arg0))
7807 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7809 if (TREE_CODE (arg1) == REAL_CST
7810 && !TREE_OVERFLOW (arg1))
7812 REAL_VALUE_TYPE cint;
7813 REAL_VALUE_TYPE c;
7814 HOST_WIDE_INT n;
7816 c = TREE_REAL_CST (arg1);
7818 /* Optimize pow(x,0.0) = 1.0. */
7819 if (REAL_VALUES_EQUAL (c, dconst0))
7820 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7821 arg0);
7823 /* Optimize pow(x,1.0) = x. */
7824 if (REAL_VALUES_EQUAL (c, dconst1))
7825 return arg0;
7827 /* Optimize pow(x,-1.0) = 1.0/x. */
7828 if (REAL_VALUES_EQUAL (c, dconstm1))
7829 return fold_build2_loc (loc, RDIV_EXPR, type,
7830 build_real (type, dconst1), arg0);
7832 /* Optimize pow(x,0.5) = sqrt(x). */
7833 if (flag_unsafe_math_optimizations
7834 && REAL_VALUES_EQUAL (c, dconsthalf))
7836 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7838 if (sqrtfn != NULL_TREE)
7839 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
7842 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7843 if (flag_unsafe_math_optimizations)
7845 const REAL_VALUE_TYPE dconstroot
7846 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7848 if (REAL_VALUES_EQUAL (c, dconstroot))
7850 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7851 if (cbrtfn != NULL_TREE)
7852 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
7856 /* Check for an integer exponent. */
7857 n = real_to_integer (&c);
7858 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
7859 if (real_identical (&c, &cint))
7861 /* Attempt to evaluate pow at compile-time, unless this should
7862 raise an exception. */
7863 if (TREE_CODE (arg0) == REAL_CST
7864 && !TREE_OVERFLOW (arg0)
7865 && (n > 0
7866 || (!flag_trapping_math && !flag_errno_math)
7867 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
7869 REAL_VALUE_TYPE x;
7870 bool inexact;
7872 x = TREE_REAL_CST (arg0);
7873 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7874 if (flag_unsafe_math_optimizations || !inexact)
7875 return build_real (type, x);
7878 /* Strip sign ops from even integer powers. */
7879 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
7881 tree narg0 = fold_strip_sign_ops (arg0);
7882 if (narg0)
7883 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
7888 if (flag_unsafe_math_optimizations)
7890 const enum built_in_function fcode = builtin_mathfn_code (arg0);
7892 /* Optimize pow(expN(x),y) = expN(x*y). */
7893 if (BUILTIN_EXPONENT_P (fcode))
7895 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7896 tree arg = CALL_EXPR_ARG (arg0, 0);
7897 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7898 return build_call_expr_loc (loc, expfn, 1, arg);
7901 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7902 if (BUILTIN_SQRT_P (fcode))
7904 tree narg0 = CALL_EXPR_ARG (arg0, 0);
7905 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7906 build_real (type, dconsthalf));
7907 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
7910 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7911 if (BUILTIN_CBRT_P (fcode))
7913 tree arg = CALL_EXPR_ARG (arg0, 0);
7914 if (tree_expr_nonnegative_p (arg))
7916 const REAL_VALUE_TYPE dconstroot
7917 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7918 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7919 build_real (type, dconstroot));
7920 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
7924 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
7925 if (fcode == BUILT_IN_POW
7926 || fcode == BUILT_IN_POWF
7927 || fcode == BUILT_IN_POWL)
7929 tree arg00 = CALL_EXPR_ARG (arg0, 0);
7930 if (tree_expr_nonnegative_p (arg00))
7932 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7933 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7934 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7939 return NULL_TREE;
7942 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
7943 Return NULL_TREE if no simplification can be made. */
7944 static tree
7945 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
7946 tree arg0, tree arg1, tree type)
7948 if (!validate_arg (arg0, REAL_TYPE)
7949 || !validate_arg (arg1, INTEGER_TYPE))
7950 return NULL_TREE;
7952 /* Optimize pow(1.0,y) = 1.0. */
7953 if (real_onep (arg0))
7954 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
7956 if (host_integerp (arg1, 0))
7958 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
7960 /* Evaluate powi at compile-time. */
7961 if (TREE_CODE (arg0) == REAL_CST
7962 && !TREE_OVERFLOW (arg0))
7964 REAL_VALUE_TYPE x;
7965 x = TREE_REAL_CST (arg0);
7966 real_powi (&x, TYPE_MODE (type), &x, c);
7967 return build_real (type, x);
7970 /* Optimize pow(x,0) = 1.0. */
7971 if (c == 0)
7972 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
7973 arg0);
7975 /* Optimize pow(x,1) = x. */
7976 if (c == 1)
7977 return arg0;
7979 /* Optimize pow(x,-1) = 1.0/x. */
7980 if (c == -1)
7981 return fold_build2_loc (loc, RDIV_EXPR, type,
7982 build_real (type, dconst1), arg0);
7985 return NULL_TREE;
7988 /* A subroutine of fold_builtin to fold the various exponent
7989 functions. Return NULL_TREE if no simplification can be made.
7990 FUNC is the corresponding MPFR exponent function. */
7992 static tree
7993 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
7994 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
7996 if (validate_arg (arg, REAL_TYPE))
7998 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7999 tree res;
8001 /* Calculate the result when the argument is a constant. */
8002 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8003 return res;
8005 /* Optimize expN(logN(x)) = x. */
8006 if (flag_unsafe_math_optimizations)
8008 const enum built_in_function fcode = builtin_mathfn_code (arg);
8010 if ((func == mpfr_exp
8011 && (fcode == BUILT_IN_LOG
8012 || fcode == BUILT_IN_LOGF
8013 || fcode == BUILT_IN_LOGL))
8014 || (func == mpfr_exp2
8015 && (fcode == BUILT_IN_LOG2
8016 || fcode == BUILT_IN_LOG2F
8017 || fcode == BUILT_IN_LOG2L))
8018 || (func == mpfr_exp10
8019 && (fcode == BUILT_IN_LOG10
8020 || fcode == BUILT_IN_LOG10F
8021 || fcode == BUILT_IN_LOG10L)))
8022 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8026 return NULL_TREE;
8029 /* Return true if VAR is a VAR_DECL or a component thereof. */
8031 static bool
8032 var_decl_component_p (tree var)
8034 tree inner = var;
8035 while (handled_component_p (inner))
8036 inner = TREE_OPERAND (inner, 0);
8037 return SSA_VAR_P (inner);
8040 /* Fold function call to builtin memset. Return
8041 NULL_TREE if no simplification can be made. */
8043 static tree
8044 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8045 tree type, bool ignore)
8047 tree var, ret, etype;
8048 unsigned HOST_WIDE_INT length, cval;
8050 if (! validate_arg (dest, POINTER_TYPE)
8051 || ! validate_arg (c, INTEGER_TYPE)
8052 || ! validate_arg (len, INTEGER_TYPE))
8053 return NULL_TREE;
8055 if (! host_integerp (len, 1))
8056 return NULL_TREE;
8058 /* If the LEN parameter is zero, return DEST. */
8059 if (integer_zerop (len))
8060 return omit_one_operand_loc (loc, type, dest, c);
8062 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8063 return NULL_TREE;
8065 var = dest;
8066 STRIP_NOPS (var);
8067 if (TREE_CODE (var) != ADDR_EXPR)
8068 return NULL_TREE;
8070 var = TREE_OPERAND (var, 0);
8071 if (TREE_THIS_VOLATILE (var))
8072 return NULL_TREE;
8074 etype = TREE_TYPE (var);
8075 if (TREE_CODE (etype) == ARRAY_TYPE)
8076 etype = TREE_TYPE (etype);
8078 if (!INTEGRAL_TYPE_P (etype)
8079 && !POINTER_TYPE_P (etype))
8080 return NULL_TREE;
8082 if (! var_decl_component_p (var))
8083 return NULL_TREE;
8085 length = tree_low_cst (len, 1);
8086 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8087 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8088 < (int) length)
8089 return NULL_TREE;
8091 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8092 return NULL_TREE;
8094 if (integer_zerop (c))
8095 cval = 0;
8096 else
8098 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8099 return NULL_TREE;
8101 cval = tree_low_cst (c, 1);
8102 cval &= 0xff;
8103 cval |= cval << 8;
8104 cval |= cval << 16;
8105 cval |= (cval << 31) << 1;
8108 ret = build_int_cst_type (etype, cval);
8109 var = build_fold_indirect_ref_loc (loc,
8110 fold_convert_loc (loc,
8111 build_pointer_type (etype),
8112 dest));
8113 ret = build2 (MODIFY_EXPR, etype, var, ret);
8114 if (ignore)
8115 return ret;
8117 return omit_one_operand_loc (loc, type, dest, ret);
8120 /* Fold function call to builtin memset. Return
8121 NULL_TREE if no simplification can be made. */
8123 static tree
8124 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8126 if (! validate_arg (dest, POINTER_TYPE)
8127 || ! validate_arg (size, INTEGER_TYPE))
8128 return NULL_TREE;
8130 if (!ignore)
8131 return NULL_TREE;
8133 /* New argument list transforming bzero(ptr x, int y) to
8134 memset(ptr x, int 0, size_t y). This is done this way
8135 so that if it isn't expanded inline, we fallback to
8136 calling bzero instead of memset. */
8138 return fold_builtin_memset (loc, dest, integer_zero_node,
8139 fold_convert_loc (loc, sizetype, size),
8140 void_type_node, ignore);
8143 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8144 NULL_TREE if no simplification can be made.
8145 If ENDP is 0, return DEST (like memcpy).
8146 If ENDP is 1, return DEST+LEN (like mempcpy).
8147 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8148 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8149 (memmove). */
8151 static tree
8152 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8153 tree len, tree type, bool ignore, int endp)
/* TYPE is the call's return type; IGNORE is true when the call's value
   is unused, in which case the fold may reduce to a bare assignment.  */
8155 tree destvar, srcvar, expr;
8157 if (! validate_arg (dest, POINTER_TYPE)
8158 || ! validate_arg (src, POINTER_TYPE)
8159 || ! validate_arg (len, INTEGER_TYPE))
8160 return NULL_TREE;
8162 /* If the LEN parameter is zero, return DEST. */
8163 if (integer_zerop (len))
8164 return omit_one_operand_loc (loc, type, dest, src);
8166 /* If SRC and DEST are the same (and not volatile), return
8167 DEST{,+LEN,+LEN-1}. */
8168 if (operand_equal_p (src, dest, 0))
8169 expr = len;
8170 else
8172 tree srctype, desttype;
8173 int src_align, dest_align;
/* ENDP == 3 is memmove: it may only be lowered to memcpy when SRC and
   DEST provably do not overlap (read-only source, small known length,
   or disjoint bases).  */
8175 if (endp == 3)
8177 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8178 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8180 /* Both DEST and SRC must be pointer types.
8181 ??? This is what old code did. Is the testing for pointer types
8182 really mandatory?
8184 If either SRC is readonly or length is 1, we can use memcpy. */
8185 if (!dest_align || !src_align)
8186 return NULL_TREE;
8187 if (readonly_data_expr (src)
8188 || (host_integerp (len, 1)
8189 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8190 >= tree_low_cst (len, 1))))
8192 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8193 if (!fn)
8194 return NULL_TREE;
8195 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8198 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8199 srcvar = build_fold_indirect_ref_loc (loc, src);
8200 destvar = build_fold_indirect_ref_loc (loc, dest);
8201 if (srcvar
8202 && !TREE_THIS_VOLATILE (srcvar)
8203 && destvar
8204 && !TREE_THIS_VOLATILE (destvar))
8206 tree src_base, dest_base, fn;
8207 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8208 HOST_WIDE_INT size = -1;
8209 HOST_WIDE_INT maxsize = -1;
/* Resolve both accesses to base object + bit offset so we can run an
   exact range-overlap test below.  */
8211 src_base = srcvar;
8212 if (handled_component_p (src_base))
8213 src_base = get_ref_base_and_extent (src_base, &src_offset,
8214 &size, &maxsize);
8215 dest_base = destvar;
8216 if (handled_component_p (dest_base))
8217 dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
8218 &size, &maxsize);
8219 if (host_integerp (len, 1))
8221 maxsize = tree_low_cst (len, 1);
/* Guard against overflow when converting the byte count to bits.  */
8222 if (maxsize
8223 > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
8224 maxsize = -1;
8225 else
8226 maxsize *= BITS_PER_UNIT;
8228 else
8229 maxsize = -1;
8230 if (SSA_VAR_P (src_base)
8231 && SSA_VAR_P (dest_base))
8233 if (operand_equal_p (src_base, dest_base, 0)
8234 && ranges_overlap_p (src_offset, maxsize,
8235 dest_offset, maxsize))
8236 return NULL_TREE;
8238 else if (TREE_CODE (src_base) == INDIRECT_REF
8239 && TREE_CODE (dest_base) == INDIRECT_REF)
8241 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8242 TREE_OPERAND (dest_base, 0), 0)
8243 || ranges_overlap_p (src_offset, maxsize,
8244 dest_offset, maxsize))
8245 return NULL_TREE;
8247 else
8248 return NULL_TREE;
8250 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8251 if (!fn)
8252 return NULL_TREE;
8253 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8255 return NULL_TREE;
/* Non-memmove case: try to replace the copy by a single assignment
   *dest = *src when LEN equals the size of the pointed-to type.  */
8258 if (!host_integerp (len, 0))
8259 return NULL_TREE;
8260 /* FIXME:
8261 This logic lose for arguments like (type *)malloc (sizeof (type)),
8262 since we strip the casts of up to VOID return value from malloc.
8263 Perhaps we ought to inherit type from non-VOID argument here? */
8264 STRIP_NOPS (src);
8265 STRIP_NOPS (dest);
8266 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
8267 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8269 tree tem = TREE_OPERAND (src, 0);
8270 STRIP_NOPS (tem);
8271 if (tem != TREE_OPERAND (src, 0))
8272 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8274 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8276 tree tem = TREE_OPERAND (dest, 0);
8277 STRIP_NOPS (tem);
8278 if (tem != TREE_OPERAND (dest, 0))
8279 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
/* If SRC points to an array whose total size differs from LEN, retry
   with a pointer to the array's element type instead.  */
8281 srctype = TREE_TYPE (TREE_TYPE (src));
8282 if (srctype
8283 && TREE_CODE (srctype) == ARRAY_TYPE
8284 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8286 srctype = TREE_TYPE (srctype);
8287 STRIP_NOPS (src);
8288 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8290 desttype = TREE_TYPE (TREE_TYPE (dest));
8291 if (desttype
8292 && TREE_CODE (desttype) == ARRAY_TYPE
8293 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8295 desttype = TREE_TYPE (desttype);
8296 STRIP_NOPS (dest);
8297 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
/* Both sides need complete, constant-sized, non-volatile types for the
   assignment form to be valid.  */
8299 if (!srctype || !desttype
8300 || !TYPE_SIZE_UNIT (srctype)
8301 || !TYPE_SIZE_UNIT (desttype)
8302 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8303 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8304 || TYPE_VOLATILE (srctype)
8305 || TYPE_VOLATILE (desttype))
8306 return NULL_TREE;
8308 src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8309 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8310 if (dest_align < (int) TYPE_ALIGN (desttype)
8311 || src_align < (int) TYPE_ALIGN (srctype))
8312 return NULL_TREE;
8314 if (!ignore)
8315 dest = builtin_save_expr (dest);
8317 srcvar = NULL_TREE;
8318 if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8320 srcvar = build_fold_indirect_ref_loc (loc, src);
8321 if (TREE_THIS_VOLATILE (srcvar))
8322 return NULL_TREE;
8323 else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
8324 srcvar = NULL_TREE;
8325 /* With memcpy, it is possible to bypass aliasing rules, so without
8326 this check i.e. execute/20060930-2.c would be misoptimized,
8327 because it use conflicting alias set to hold argument for the
8328 memcpy call. This check is probably unnecessary with
8329 -fno-strict-aliasing. Similarly for destvar. See also
8330 PR29286. */
8331 else if (!var_decl_component_p (srcvar))
8332 srcvar = NULL_TREE;
8335 destvar = NULL_TREE;
8336 if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8338 destvar = build_fold_indirect_ref_loc (loc, dest);
8339 if (TREE_THIS_VOLATILE (destvar))
8340 return NULL_TREE;
8341 else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
8342 destvar = NULL_TREE;
8343 else if (!var_decl_component_p (destvar))
8344 destvar = NULL_TREE;
8347 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8348 return NULL_TREE;
/* Only one side qualified; synthesize an unqualified (possibly packed,
   under-aligned) variant of the other side's type for the missing
   reference.  */
8350 if (srcvar == NULL_TREE)
8352 tree srcptype;
8353 if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
8354 return NULL_TREE;
8356 srctype = build_qualified_type (desttype, 0);
8357 if (src_align < (int) TYPE_ALIGN (srctype))
8359 if (AGGREGATE_TYPE_P (srctype)
8360 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
8361 return NULL_TREE;
8363 srctype = build_variant_type_copy (srctype);
8364 TYPE_ALIGN (srctype) = src_align;
8365 TYPE_USER_ALIGN (srctype) = 1;
8366 TYPE_PACKED (srctype) = 1;
8368 srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
8369 src = fold_convert_loc (loc, srcptype, src);
8370 srcvar = build_fold_indirect_ref_loc (loc, src);
8372 else if (destvar == NULL_TREE)
8374 tree destptype;
8375 if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
8376 return NULL_TREE;
8378 desttype = build_qualified_type (srctype, 0);
8379 if (dest_align < (int) TYPE_ALIGN (desttype))
8381 if (AGGREGATE_TYPE_P (desttype)
8382 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
8383 return NULL_TREE;
8385 desttype = build_variant_type_copy (desttype);
8386 TYPE_ALIGN (desttype) = dest_align;
8387 TYPE_USER_ALIGN (desttype) = 1;
8388 TYPE_PACKED (desttype) = 1;
8390 destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
8391 dest = fold_convert_loc (loc, destptype, dest);
8392 destvar = build_fold_indirect_ref_loc (loc, dest);
/* Use *src directly when the types agree; otherwise convert, falling
   back to VIEW_CONVERT_EXPR for non-scalar/incompatible types.  */
8395 if (srctype == desttype
8396 || (gimple_in_ssa_p (cfun)
8397 && useless_type_conversion_p (desttype, srctype)))
8398 expr = srcvar;
8399 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8400 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8401 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8402 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8403 expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
8404 else
8405 expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8406 TREE_TYPE (destvar), srcvar);
8407 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8410 if (ignore)
8411 return expr;
/* Build the call's return value according to ENDP; see the header
   comment for the meaning of each value.  */
8413 if (endp == 0 || endp == 3)
8414 return omit_one_operand_loc (loc, type, dest, expr);
8416 if (expr == len)
8417 expr = NULL_TREE;
8419 if (endp == 2)
8420 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
8421 ssize_int (1));
8423 len = fold_convert_loc (loc, sizetype, len);
8424 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8425 dest = fold_convert_loc (loc, type, dest);
8426 if (expr)
8427 dest = omit_one_operand_loc (loc, type, dest, expr);
8428 return dest;
8431 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8432 If LEN is not NULL, it represents the length of the string to be
8433 copied. Return NULL_TREE if no simplification can be made. */
8435 tree
8436 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
8438 tree fn;
8440 if (!validate_arg (dest, POINTER_TYPE)
8441 || !validate_arg (src, POINTER_TYPE))
8442 return NULL_TREE;
8444 /* If SRC and DEST are the same (and not volatile), return DEST. */
8445 if (operand_equal_p (src, dest, 0))
8446 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
/* When optimizing for size, leave the strcpy call alone.  */
8448 if (optimize_function_for_size_p (cfun))
8449 return NULL_TREE;
8451 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8452 if (!fn)
8453 return NULL_TREE;
/* Without a caller-supplied LEN, compute strlen(SRC); it must be known
   and free of side effects for the transformation to be valid.  */
8455 if (!len)
8457 len = c_strlen (src, 1);
8458 if (! len || TREE_SIDE_EFFECTS (len))
8459 return NULL_TREE;
/* Copy LEN+1 bytes so the terminating NUL is included.  */
8462 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8463 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8464 build_call_expr_loc (loc, fn, 3, dest, src, len));
8467 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
8468 Return NULL_TREE if no simplification can be made. */
8470 static tree
8471 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
8473 tree fn, len, lenp1, call, type;
8475 if (!validate_arg (dest, POINTER_TYPE)
8476 || !validate_arg (src, POINTER_TYPE))
8477 return NULL_TREE;
/* The source length must be a compile-time constant so both the copy
   size and the DEST+LEN return value are known.  */
8479 len = c_strlen (src, 1);
8480 if (!len
8481 || TREE_CODE (len) != INTEGER_CST)
8482 return NULL_TREE;
8484 if (optimize_function_for_size_p (cfun)
8485 /* If length is zero it's small enough. */
8486 && !integer_zerop (len))
8487 return NULL_TREE;
8489 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8490 if (!fn)
8491 return NULL_TREE;
/* memcpy must copy the terminating NUL too, hence LEN+1 bytes.  */
8493 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
8494 /* We use dest twice in building our expression. Save it from
8495 multiple expansions. */
8496 dest = builtin_save_expr (dest);
8497 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
/* stpcpy returns DEST+LEN (a pointer to the copied NUL), evaluated
   after the memcpy call via omit_one_operand.  */
8499 type = TREE_TYPE (TREE_TYPE (fndecl));
8500 len = fold_convert_loc (loc, sizetype, len);
8501 dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8502 dest = fold_convert_loc (loc, type, dest);
8503 dest = omit_one_operand_loc (loc, type, dest, call);
8504 return dest;
8507 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8508 If SLEN is not NULL, it represents the length of the source string.
8509 Return NULL_TREE if no simplification can be made. */
8511 tree
8512 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
8513 tree src, tree len, tree slen)
8515 tree fn;
8517 if (!validate_arg (dest, POINTER_TYPE)
8518 || !validate_arg (src, POINTER_TYPE)
8519 || !validate_arg (len, INTEGER_TYPE))
8520 return NULL_TREE;
8522 /* If the LEN parameter is zero, return DEST. */
8523 if (integer_zerop (len))
8524 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8526 /* We can't compare slen with len as constants below if len is not a
8527 constant. */
8528 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8529 return NULL_TREE;
8531 if (!slen)
8532 slen = c_strlen (src, 1);
8534 /* Now, we must be passed a constant src ptr parameter. */
8535 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8536 return NULL_TREE;
/* SLEN+1 counts the terminating NUL of SRC.  */
8538 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
8540 /* We do not support simplification of this case, though we do
8541 support it when expanding trees into RTL. */
8542 /* FIXME: generate a call to __builtin_memset. */
8543 if (tree_int_cst_lt (slen, len))
8544 return NULL_TREE;
8546 /* OK transform into builtin memcpy. */
8547 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8548 if (!fn)
8549 return NULL_TREE;
8550 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
8551 build_call_expr_loc (loc, fn, 3, dest, src, len));
8554 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8555 arguments to the call, and TYPE is its return type.
8556 Return NULL_TREE if no simplification can be made. */
8558 static tree
8559 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
8561 if (!validate_arg (arg1, POINTER_TYPE)
8562 || !validate_arg (arg2, INTEGER_TYPE)
8563 || !validate_arg (len, INTEGER_TYPE))
8564 return NULL_TREE;
8565 else
8567 const char *p1;
8569 if (TREE_CODE (arg2) != INTEGER_CST
8570 || !host_integerp (len, 1))
8571 return NULL_TREE;
/* Fold at compile time only when the haystack is a known string
   literal and LEN does not read past its terminating NUL.  */
8573 p1 = c_getstr (arg1);
8574 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8576 char c;
8577 const char *r;
8578 tree tem;
/* Reject a search character that does not fit a target char.  */
8580 if (target_char_cast (arg2, &c))
8581 return NULL_TREE;
8583 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8585 if (r == NULL)
8586 return build_int_cst (TREE_TYPE (arg1), 0);
/* Found: the result is ARG1 plus the host-computed offset.  */
8588 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8589 size_int (r - p1));
8590 return fold_convert_loc (loc, type, tem);
8592 return NULL_TREE;
8596 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8597 Return NULL_TREE if no simplification can be made. */
8599 static tree
8600 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8602 const char *p1, *p2;
8604 if (!validate_arg (arg1, POINTER_TYPE)
8605 || !validate_arg (arg2, POINTER_TYPE)
8606 || !validate_arg (len, INTEGER_TYPE))
8607 return NULL_TREE;
8609 /* If the LEN parameter is zero, return zero. */
8610 if (integer_zerop (len))
8611 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8612 arg1, arg2);
8614 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8615 if (operand_equal_p (arg1, arg2, 0))
8616 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8618 p1 = c_getstr (arg1);
8619 p2 = c_getstr (arg2);
8621 /* If all arguments are constant, and the value of len is not greater
8622 than the lengths of arg1 and arg2, evaluate at compile-time. */
8623 if (host_integerp (len, 1) && p1 && p2
8624 && compare_tree_int (len, strlen (p1) + 1) <= 0
8625 && compare_tree_int (len, strlen (p2) + 1) <= 0)
/* Normalize the host memcmp result to -1/0/1 so the folded value
   does not depend on the host library's exact return value.  */
8627 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8629 if (r > 0)
8630 return integer_one_node;
8631 else if (r < 0)
8632 return integer_minus_one_node;
8633 else
8634 return integer_zero_node;
8637 /* If len parameter is one, return an expression corresponding to
8638 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8639 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8641 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8642 tree cst_uchar_ptr_node
8643 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8645 tree ind1
8646 = fold_convert_loc (loc, integer_type_node,
8647 build1 (INDIRECT_REF, cst_uchar_node,
8648 fold_convert_loc (loc,
8649 cst_uchar_ptr_node,
8650 arg1)));
8651 tree ind2
8652 = fold_convert_loc (loc, integer_type_node,
8653 build1 (INDIRECT_REF, cst_uchar_node,
8654 fold_convert_loc (loc,
8655 cst_uchar_ptr_node,
8656 arg2)));
8657 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8660 return NULL_TREE;
8663 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8664 Return NULL_TREE if no simplification can be made. */
8666 static tree
8667 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
8669 const char *p1, *p2;
8671 if (!validate_arg (arg1, POINTER_TYPE)
8672 || !validate_arg (arg2, POINTER_TYPE))
8673 return NULL_TREE;
8675 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8676 if (operand_equal_p (arg1, arg2, 0))
8677 return integer_zero_node;
8679 p1 = c_getstr (arg1);
8680 p2 = c_getstr (arg2);
/* Both strings known at compile time: fold to the sign (-1/0/1) of the
   host strcmp result so the value is host-independent.  */
8682 if (p1 && p2)
8684 const int i = strcmp (p1, p2);
8685 if (i < 0)
8686 return integer_minus_one_node;
8687 else if (i > 0)
8688 return integer_one_node;
8689 else
8690 return integer_zero_node;
8693 /* If the second arg is "", return *(const unsigned char*)arg1. */
8694 if (p2 && *p2 == '\0')
8696 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8697 tree cst_uchar_ptr_node
8698 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8700 return fold_convert_loc (loc, integer_type_node,
8701 build1 (INDIRECT_REF, cst_uchar_node,
8702 fold_convert_loc (loc,
8703 cst_uchar_ptr_node,
8704 arg1)));
8707 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8708 if (p1 && *p1 == '\0')
8710 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8711 tree cst_uchar_ptr_node
8712 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8714 tree temp
8715 = fold_convert_loc (loc, integer_type_node,
8716 build1 (INDIRECT_REF, cst_uchar_node,
8717 fold_convert_loc (loc,
8718 cst_uchar_ptr_node,
8719 arg2)));
8720 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8723 return NULL_TREE;
8726 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8727 Return NULL_TREE if no simplification can be made. */
8729 static tree
8730 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
8732 const char *p1, *p2;
8734 if (!validate_arg (arg1, POINTER_TYPE)
8735 || !validate_arg (arg2, POINTER_TYPE)
8736 || !validate_arg (len, INTEGER_TYPE))
8737 return NULL_TREE;
8739 /* If the LEN parameter is zero, return zero. */
8740 if (integer_zerop (len))
8741 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8742 arg1, arg2);
8744 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8745 if (operand_equal_p (arg1, arg2, 0))
8746 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8748 p1 = c_getstr (arg1);
8749 p2 = c_getstr (arg2);
/* Both strings and LEN known: fold to the sign (-1/0/1) of the host
   strncmp result so the value is host-independent.  */
8751 if (host_integerp (len, 1) && p1 && p2)
8753 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
8754 if (i > 0)
8755 return integer_one_node;
8756 else if (i < 0)
8757 return integer_minus_one_node;
8758 else
8759 return integer_zero_node;
8762 /* If the second arg is "", and the length is greater than zero,
8763 return *(const unsigned char*)arg1. */
8764 if (p2 && *p2 == '\0'
8765 && TREE_CODE (len) == INTEGER_CST
8766 && tree_int_cst_sgn (len) == 1)
8768 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8769 tree cst_uchar_ptr_node
8770 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8772 return fold_convert_loc (loc, integer_type_node,
8773 build1 (INDIRECT_REF, cst_uchar_node,
8774 fold_convert_loc (loc,
8775 cst_uchar_ptr_node,
8776 arg1)));
8779 /* If the first arg is "", and the length is greater than zero,
8780 return -*(const unsigned char*)arg2. */
8781 if (p1 && *p1 == '\0'
8782 && TREE_CODE (len) == INTEGER_CST
8783 && tree_int_cst_sgn (len) == 1)
8785 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8786 tree cst_uchar_ptr_node
8787 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8789 tree temp = fold_convert_loc (loc, integer_type_node,
8790 build1 (INDIRECT_REF, cst_uchar_node,
8791 fold_convert_loc (loc,
8792 cst_uchar_ptr_node,
8793 arg2)));
8794 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
8797 /* If len parameter is one, return an expression corresponding to
8798 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8799 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8801 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8802 tree cst_uchar_ptr_node
8803 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8805 tree ind1 = fold_convert_loc (loc, integer_type_node,
8806 build1 (INDIRECT_REF, cst_uchar_node,
8807 fold_convert_loc (loc,
8808 cst_uchar_ptr_node,
8809 arg1)));
8810 tree ind2 = fold_convert_loc (loc, integer_type_node,
8811 build1 (INDIRECT_REF, cst_uchar_node,
8812 fold_convert_loc (loc,
8813 cst_uchar_ptr_node,
8814 arg2)));
8815 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8818 return NULL_TREE;
8821 /* Fold function call to builtin signbit, signbitf or signbitl with argument
8822 ARG. Return NULL_TREE if no simplification can be made. */
8824 static tree
8825 fold_builtin_signbit (location_t loc, tree arg, tree type)
8827 tree temp;
8829 if (!validate_arg (arg, REAL_TYPE))
8830 return NULL_TREE;
8832 /* If ARG is a compile-time constant, determine the result. */
8833 if (TREE_CODE (arg) == REAL_CST
8834 && !TREE_OVERFLOW (arg))
8836 REAL_VALUE_TYPE c;
8838 c = TREE_REAL_CST (arg);
8839 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
8840 return fold_convert_loc (loc, type, temp);
8843 /* If ARG is non-negative, the result is always zero. */
8844 if (tree_expr_nonnegative_p (arg))
8845 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8847 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
8848 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
8849 return fold_build2_loc (loc, LT_EXPR, type, arg,
8850 build_real (TREE_TYPE (arg), dconst0));
8852 return NULL_TREE;
8855 /* Fold function call to builtin copysign, copysignf or copysignl with
8856 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8857 be made. */
8859 static tree
8860 fold_builtin_copysign (location_t loc, tree fndecl,
8861 tree arg1, tree arg2, tree type)
/* TYPE is the call's return type; FNDECL is used to rebuild the call
   when only ARG1 can be simplified.  */
8863 tree tem;
8865 if (!validate_arg (arg1, REAL_TYPE)
8866 || !validate_arg (arg2, REAL_TYPE))
8867 return NULL_TREE;
8869 /* copysign(X,X) is X. */
8870 if (operand_equal_p (arg1, arg2, 0))
8871 return fold_convert_loc (loc, type, arg1);
8873 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8874 if (TREE_CODE (arg1) == REAL_CST
8875 && TREE_CODE (arg2) == REAL_CST
8876 && !TREE_OVERFLOW (arg1)
8877 && !TREE_OVERFLOW (arg2))
8879 REAL_VALUE_TYPE c1, c2;
8881 c1 = TREE_REAL_CST (arg1);
8882 c2 = TREE_REAL_CST (arg2);
8883 /* c1.sign := c2.sign. */
8884 real_copysign (&c1, &c2);
8885 return build_real (type, c1);
8888 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8889 Remember to evaluate Y for side-effects. */
8890 if (tree_expr_nonnegative_p (arg2))
8891 return omit_one_operand_loc (loc, type,
8892 fold_build1_loc (loc, ABS_EXPR, type, arg1),
8893 arg2);
8895 /* Strip sign changing operations for the first argument. */
/* copysign replaces ARG1's sign with ARG2's, so negations and fabs on
   ARG1 cannot affect the result; rebuild the call without them.  */
8896 tem = fold_strip_sign_ops (arg1);
8897 if (tem)
8898 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
8900 return NULL_TREE;
8903 /* Fold a call to builtin isascii with argument ARG. */
8905 static tree
8906 fold_builtin_isascii (location_t loc, tree arg)
8908 if (!validate_arg (arg, INTEGER_TYPE))
8909 return NULL_TREE;
8910 else
8912 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8913 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8914 build_int_cst (NULL_TREE,
8915 ~ (unsigned HOST_WIDE_INT) 0x7f));
8916 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8917 arg, integer_zero_node);
8921 /* Fold a call to builtin toascii with argument ARG. */
8923 static tree
8924 fold_builtin_toascii (location_t loc, tree arg)
8926 if (!validate_arg (arg, INTEGER_TYPE))
8927 return NULL_TREE;
8929 /* Transform toascii(c) -> (c & 0x7f). */
8930 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8931 build_int_cst (NULL_TREE, 0x7f));
8934 /* Fold a call to builtin isdigit with argument ARG. */
8936 static tree
8937 fold_builtin_isdigit (location_t loc, tree arg)
8939 if (!validate_arg (arg, INTEGER_TYPE))
8940 return NULL_TREE;
8941 else
8943 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8944 /* According to the C standard, isdigit is unaffected by locale.
8945 However, it definitely is affected by the target character set. */
8946 unsigned HOST_WIDE_INT target_digit0
8947 = lang_hooks.to_target_charset ('0');
8949 if (target_digit0 == 0)
8950 return NULL_TREE;
8952 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8953 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8954 build_int_cst (unsigned_type_node, target_digit0));
8955 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8956 build_int_cst (unsigned_type_node, 9));
8960 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8962 static tree
8963 fold_builtin_fabs (location_t loc, tree arg, tree type)
8965 if (!validate_arg (arg, REAL_TYPE))
8966 return NULL_TREE;
8968 arg = fold_convert_loc (loc, type, arg);
8969 if (TREE_CODE (arg) == REAL_CST)
8970 return fold_abs_const (arg, type);
8971 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8974 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8976 static tree
8977 fold_builtin_abs (location_t loc, tree arg, tree type)
8979 if (!validate_arg (arg, INTEGER_TYPE))
8980 return NULL_TREE;
8982 arg = fold_convert_loc (loc, type, arg);
8983 if (TREE_CODE (arg) == INTEGER_CST)
8984 return fold_abs_const (arg, type);
8985 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8988 /* Fold a call to builtin fmin or fmax. */
8990 static tree
8991 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8992 tree type, bool max)
/* TYPE is the call's return type; MAX selects fmax semantics when
   true, fmin when false.  */
8994 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
8996 /* Calculate the result when the argument is a constant. */
8997 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8999 if (res)
9000 return res;
9002 /* If either argument is NaN, return the other one. Avoid the
9003 transformation if we get (and honor) a signalling NaN. Using
9004 omit_one_operand() ensures we create a non-lvalue. */
9005 if (TREE_CODE (arg0) == REAL_CST
9006 && real_isnan (&TREE_REAL_CST (arg0))
9007 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9008 || ! TREE_REAL_CST (arg0).signalling))
9009 return omit_one_operand_loc (loc, type, arg1, arg0);
9010 if (TREE_CODE (arg1) == REAL_CST
9011 && real_isnan (&TREE_REAL_CST (arg1))
9012 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9013 || ! TREE_REAL_CST (arg1).signalling))
9014 return omit_one_operand_loc (loc, type, arg0, arg1);
9016 /* Transform fmin/fmax(x,x) -> x. */
9017 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9018 return omit_one_operand_loc (loc, type, arg0, arg1);
9020 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9021 functions to return the numeric arg if the other one is NaN.
9022 These tree codes don't honor that, so only transform if
9023 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9024 handled, so we don't have to worry about it either. */
9025 if (flag_finite_math_only)
9026 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9027 fold_convert_loc (loc, type, arg0),
9028 fold_convert_loc (loc, type, arg1));
9030 return NULL_TREE;
9033 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9035 static tree
9036 fold_builtin_carg (location_t loc, tree arg, tree type)
9038 if (validate_arg (arg, COMPLEX_TYPE)
9039 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9041 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9043 if (atan2_fn)
9045 tree new_arg = builtin_save_expr (arg);
9046 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9047 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9048 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9052 return NULL_TREE;
9055 /* Fold a call to builtin logb/ilogb. */
9057 static tree
9058 fold_builtin_logb (location_t loc, tree arg, tree rettype)
/* RETTYPE distinguishes logb (a real return type) from ilogb (an
   integer return type).  */
9060 if (! validate_arg (arg, REAL_TYPE))
9061 return NULL_TREE;
9063 STRIP_NOPS (arg);
9065 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9067 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9069 switch (value->cl)
9071 case rvc_nan:
9072 case rvc_inf:
9073 /* If arg is Inf or NaN and we're logb, return it. */
9074 if (TREE_CODE (rettype) == REAL_TYPE)
9075 return fold_convert_loc (loc, rettype, arg);
9076 /* Fall through... */
9077 case rvc_zero:
9078 /* Zero may set errno and/or raise an exception for logb, also
9079 for ilogb we don't know FP_ILOGB0. */
9080 return NULL_TREE;
9081 case rvc_normal:
9082 /* For normal numbers, proceed iff radix == 2. In GCC,
9083 normalized significands are in the range [0.5, 1.0). We
9084 want the exponent as if they were [1.0, 2.0) so get the
9085 exponent and subtract 1. */
9086 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9087 return fold_convert_loc (loc, rettype,
9088 build_int_cst (NULL_TREE,
9089 REAL_EXP (value)-1));
9090 break;
9094 return NULL_TREE;
9097 /* Fold a call to builtin significand, if radix == 2. */
9099 static tree
9100 fold_builtin_significand (location_t loc, tree arg, tree rettype)
/* RETTYPE is the real return type of the call; only compile-time
   constant arguments are folded here.  */
9102 if (! validate_arg (arg, REAL_TYPE))
9103 return NULL_TREE;
9105 STRIP_NOPS (arg);
9107 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9109 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9111 switch (value->cl)
9113 case rvc_zero:
9114 case rvc_nan:
9115 case rvc_inf:
9116 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9117 return fold_convert_loc (loc, rettype, arg);
9118 case rvc_normal:
9119 /* For normal numbers, proceed iff radix == 2. */
9120 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9122 REAL_VALUE_TYPE result = *value;
9123 /* In GCC, normalized significands are in the range [0.5,
9124 1.0). We want them to be [1.0, 2.0) so set the
9125 exponent to 1. */
9126 SET_REAL_EXP (&result, 1);
9127 return build_real (rettype, result);
9129 break;
9133 return NULL_TREE;
9136 /* Fold a call to builtin frexp, we can assume the base is 2. */
9138 static tree
9139 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
/* ARG0 is the value to split, ARG1 the int* receiving the exponent and
   RETTYPE the real return type; only constant ARG0 is folded.  */
9141 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9142 return NULL_TREE;
9144 STRIP_NOPS (arg0);
9146 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9147 return NULL_TREE;
9149 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9151 /* Proceed if a valid pointer type was passed in. */
9152 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9154 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9155 tree frac, exp;
9157 switch (value->cl)
9159 case rvc_zero:
9160 /* For +-0, return (*exp = 0, +-0). */
9161 exp = integer_zero_node;
9162 frac = arg0;
9163 break;
9164 case rvc_nan:
9165 case rvc_inf:
9166 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9167 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9168 case rvc_normal:
9170 /* Since the frexp function always expects base 2, and in
9171 GCC normalized significands are already in the range
9172 [0.5, 1.0), we have exactly what frexp wants. */
9173 REAL_VALUE_TYPE frac_rvt = *value;
9174 SET_REAL_EXP (&frac_rvt, 0);
9175 frac = build_real (rettype, frac_rvt);
9176 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9178 break;
9179 default:
9180 gcc_unreachable ();
9183 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9184 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9185 TREE_SIDE_EFFECTS (arg1) = 1;
9186 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9189 return NULL_TREE;
9192 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9193 then we can assume the base is two. If it's false, then we have to
9194 check the mode of the TYPE parameter in certain cases. */
9196 static tree
9197 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9198 tree type, bool ldexp)
9200 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9202 STRIP_NOPS (arg0);
9203 STRIP_NOPS (arg1);
9205 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9206 if (real_zerop (arg0) || integer_zerop (arg1)
9207 || (TREE_CODE (arg0) == REAL_CST
9208 && !real_isfinite (&TREE_REAL_CST (arg0))))
9209 return omit_one_operand_loc (loc, type, arg0, arg1);
9211 /* If both arguments are constant, then try to evaluate it. */
/* scalbn/scalbln scale by FLT_RADIX, so without LDEXP we may only
   constant-fold when TYPE's format has radix 2.  */
9212 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9213 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9214 && host_integerp (arg1, 0))
9216 /* Bound the maximum adjustment to twice the range of the
9217 mode's valid exponents. Use abs to ensure the range is
9218 positive as a sanity check. */
9219 const long max_exp_adj = 2 *
9220 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9221 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9223 /* Get the user-requested adjustment. */
9224 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9226 /* The requested adjustment must be inside this range. This
9227 is a preliminary cap to avoid things like overflow, we
9228 may still fail to compute the result for other reasons. */
9229 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9231 REAL_VALUE_TYPE initial_result;
9233 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9235 /* Ensure we didn't overflow. */
9236 if (! real_isinf (&initial_result))
9238 const REAL_VALUE_TYPE trunc_result
9239 = real_value_truncate (TYPE_MODE (type), initial_result);
9241 /* Only proceed if the target mode can hold the
9242 resulting value. */
9243 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9244 return build_real (type, trunc_result);
9250 return NULL_TREE;
9253 /* Fold a call to builtin modf. */
9255 static tree
9256 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9258 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9259 return NULL_TREE;
9261 STRIP_NOPS (arg0);
9263 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9264 return NULL_TREE;
9266 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9268 /* Proceed if a valid pointer type was passed in. */
9269 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9271 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9272 REAL_VALUE_TYPE trunc, frac;
9274 switch (value->cl)
9276 case rvc_nan:
9277 case rvc_zero:
9278 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9279 trunc = frac = *value;
9280 break;
9281 case rvc_inf:
9282 /* For +-Inf, return (*arg1 = arg0, +-0). */
9283 frac = dconst0;
9284 frac.sign = value->sign;
9285 trunc = *value;
9286 break;
9287 case rvc_normal:
9288 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9289 real_trunc (&trunc, VOIDmode, value);
9290 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9291 /* If the original number was negative and already
9292 integral, then the fractional part is -0.0. */
9293 if (value->sign && frac.cl == rvc_zero)
9294 frac.sign = value->sign;
9295 break;
9298 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9299 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9300 build_real (rettype, trunc));
9301 TREE_SIDE_EFFECTS (arg1) = 1;
9302 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9303 build_real (rettype, frac));
9306 return NULL_TREE;
9309 /* Given a location LOC, an interclass builtin function decl FNDECL
9310 and its single argument ARG, return an folded expression computing
9311 the same, or NULL_TREE if we either couldn't or didn't want to fold
9312 (the latter happen if there's an RTL instruction available). */
9314 static tree
9315 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9317 enum machine_mode mode;
9319 if (!validate_arg (arg, REAL_TYPE))
9320 return NULL_TREE;
9322 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9323 return NULL_TREE;
9325 mode = TYPE_MODE (TREE_TYPE (arg));
9327 /* If there is no optab, try generic code. */
9328 switch (DECL_FUNCTION_CODE (fndecl))
9330 tree result;
9332 CASE_FLT_FN (BUILT_IN_ISINF):
9334 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9335 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
9336 tree const type = TREE_TYPE (arg);
9337 REAL_VALUE_TYPE r;
9338 char buf[128];
9340 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9341 real_from_string (&r, buf);
9342 result = build_call_expr (isgr_fn, 2,
9343 fold_build1_loc (loc, ABS_EXPR, type, arg),
9344 build_real (type, r));
9345 return result;
9347 CASE_FLT_FN (BUILT_IN_FINITE):
9348 case BUILT_IN_ISFINITE:
9350 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9351 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9352 tree const type = TREE_TYPE (arg);
9353 REAL_VALUE_TYPE r;
9354 char buf[128];
9356 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9357 real_from_string (&r, buf);
9358 result = build_call_expr (isle_fn, 2,
9359 fold_build1_loc (loc, ABS_EXPR, type, arg),
9360 build_real (type, r));
9361 /*result = fold_build2_loc (loc, UNGT_EXPR,
9362 TREE_TYPE (TREE_TYPE (fndecl)),
9363 fold_build1_loc (loc, ABS_EXPR, type, arg),
9364 build_real (type, r));
9365 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9366 TREE_TYPE (TREE_TYPE (fndecl)),
9367 result);*/
9368 return result;
9370 case BUILT_IN_ISNORMAL:
9372 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9373 islessequal(fabs(x),DBL_MAX). */
9374 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
9375 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
9376 tree const type = TREE_TYPE (arg);
9377 REAL_VALUE_TYPE rmax, rmin;
9378 char buf[128];
9380 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9381 real_from_string (&rmax, buf);
9382 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9383 real_from_string (&rmin, buf);
9384 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9385 result = build_call_expr (isle_fn, 2, arg,
9386 build_real (type, rmax));
9387 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9388 build_call_expr (isge_fn, 2, arg,
9389 build_real (type, rmin)));
9390 return result;
9392 default:
9393 break;
9396 return NULL_TREE;
9399 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9400 ARG is the argument for the call. */
9402 static tree
9403 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9405 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9406 REAL_VALUE_TYPE r;
9408 if (!validate_arg (arg, REAL_TYPE))
9409 return NULL_TREE;
9411 switch (builtin_index)
9413 case BUILT_IN_ISINF:
9414 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9415 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9417 if (TREE_CODE (arg) == REAL_CST)
9419 r = TREE_REAL_CST (arg);
9420 if (real_isinf (&r))
9421 return real_compare (GT_EXPR, &r, &dconst0)
9422 ? integer_one_node : integer_minus_one_node;
9423 else
9424 return integer_zero_node;
9427 return NULL_TREE;
9429 case BUILT_IN_ISINF_SIGN:
9431 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9432 /* In a boolean context, GCC will fold the inner COND_EXPR to
9433 1. So e.g. "if (isinf_sign(x))" would be folded to just
9434 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9435 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9436 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9437 tree tmp = NULL_TREE;
9439 arg = builtin_save_expr (arg);
9441 if (signbit_fn && isinf_fn)
9443 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9444 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9446 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9447 signbit_call, integer_zero_node);
9448 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9449 isinf_call, integer_zero_node);
9451 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9452 integer_minus_one_node, integer_one_node);
9453 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9454 isinf_call, tmp,
9455 integer_zero_node);
9458 return tmp;
9461 case BUILT_IN_ISFINITE:
9462 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9463 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9464 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9466 if (TREE_CODE (arg) == REAL_CST)
9468 r = TREE_REAL_CST (arg);
9469 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9472 return NULL_TREE;
9474 case BUILT_IN_ISNAN:
9475 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9476 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9478 if (TREE_CODE (arg) == REAL_CST)
9480 r = TREE_REAL_CST (arg);
9481 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9484 arg = builtin_save_expr (arg);
9485 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9487 default:
9488 gcc_unreachable ();
9492 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9493 This builtin will generate code to return the appropriate floating
9494 point classification depending on the value of the floating point
9495 number passed in. The possible return values must be supplied as
9496 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9497 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9498 one floating point argument which is "type generic". */
9500 static tree
9501 fold_builtin_fpclassify (location_t loc, tree exp)
9503 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9504 arg, type, res, tmp;
9505 enum machine_mode mode;
9506 REAL_VALUE_TYPE r;
9507 char buf[128];
9509 /* Verify the required arguments in the original call. */
9510 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9511 INTEGER_TYPE, INTEGER_TYPE,
9512 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9513 return NULL_TREE;
9515 fp_nan = CALL_EXPR_ARG (exp, 0);
9516 fp_infinite = CALL_EXPR_ARG (exp, 1);
9517 fp_normal = CALL_EXPR_ARG (exp, 2);
9518 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9519 fp_zero = CALL_EXPR_ARG (exp, 4);
9520 arg = CALL_EXPR_ARG (exp, 5);
9521 type = TREE_TYPE (arg);
9522 mode = TYPE_MODE (type);
9523 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9525 /* fpclassify(x) ->
9526 isnan(x) ? FP_NAN :
9527 (fabs(x) == Inf ? FP_INFINITE :
9528 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9529 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9531 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9532 build_real (type, dconst0));
9533 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9534 tmp, fp_zero, fp_subnormal);
9536 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9537 real_from_string (&r, buf);
9538 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9539 arg, build_real (type, r));
9540 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9542 if (HONOR_INFINITIES (mode))
9544 real_inf (&r);
9545 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9546 build_real (type, r));
9547 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9548 fp_infinite, res);
9551 if (HONOR_NANS (mode))
9553 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9554 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9557 return res;
9560 /* Fold a call to an unordered comparison function such as
9561 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9562 being called and ARG0 and ARG1 are the arguments for the call.
9563 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9564 the opposite of the desired result. UNORDERED_CODE is used
9565 for modes that can hold NaNs and ORDERED_CODE is used for
9566 the rest. */
9568 static tree
9569 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9570 enum tree_code unordered_code,
9571 enum tree_code ordered_code)
9573 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9574 enum tree_code code;
9575 tree type0, type1;
9576 enum tree_code code0, code1;
9577 tree cmp_type = NULL_TREE;
9579 type0 = TREE_TYPE (arg0);
9580 type1 = TREE_TYPE (arg1);
9582 code0 = TREE_CODE (type0);
9583 code1 = TREE_CODE (type1);
9585 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9586 /* Choose the wider of two real types. */
9587 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9588 ? type0 : type1;
9589 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9590 cmp_type = type0;
9591 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9592 cmp_type = type1;
9594 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9595 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9597 if (unordered_code == UNORDERED_EXPR)
9599 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9600 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9601 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9604 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9605 : ordered_code;
9606 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9607 fold_build2_loc (loc, code, type, arg0, arg1));
9610 /* Fold a call to built-in function FNDECL with 0 arguments.
9611 IGNORE is true if the result of the function call is ignored. This
9612 function returns NULL_TREE if no simplification was possible. */
9614 static tree
9615 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9617 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9618 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9619 switch (fcode)
9621 CASE_FLT_FN (BUILT_IN_INF):
9622 case BUILT_IN_INFD32:
9623 case BUILT_IN_INFD64:
9624 case BUILT_IN_INFD128:
9625 return fold_builtin_inf (loc, type, true);
9627 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9628 return fold_builtin_inf (loc, type, false);
9630 case BUILT_IN_CLASSIFY_TYPE:
9631 return fold_builtin_classify_type (NULL_TREE);
9633 default:
9634 break;
9636 return NULL_TREE;
9639 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9640 IGNORE is true if the result of the function call is ignored. This
9641 function returns NULL_TREE if no simplification was possible. */
9643 static tree
9644 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
9646 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9647 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9648 switch (fcode)
9651 case BUILT_IN_CONSTANT_P:
9653 tree val = fold_builtin_constant_p (arg0);
9655 /* Gimplification will pull the CALL_EXPR for the builtin out of
9656 an if condition. When not optimizing, we'll not CSE it back.
9657 To avoid link error types of regressions, return false now. */
9658 if (!val && !optimize)
9659 val = integer_zero_node;
9661 return val;
9664 case BUILT_IN_CLASSIFY_TYPE:
9665 return fold_builtin_classify_type (arg0);
9667 case BUILT_IN_STRLEN:
9668 return fold_builtin_strlen (loc, arg0);
9670 CASE_FLT_FN (BUILT_IN_FABS):
9671 return fold_builtin_fabs (loc, arg0, type);
9673 case BUILT_IN_ABS:
9674 case BUILT_IN_LABS:
9675 case BUILT_IN_LLABS:
9676 case BUILT_IN_IMAXABS:
9677 return fold_builtin_abs (loc, arg0, type);
9679 CASE_FLT_FN (BUILT_IN_CONJ):
9680 if (validate_arg (arg0, COMPLEX_TYPE)
9681 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9682 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9683 break;
9685 CASE_FLT_FN (BUILT_IN_CREAL):
9686 if (validate_arg (arg0, COMPLEX_TYPE)
9687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9688 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
9689 break;
9691 CASE_FLT_FN (BUILT_IN_CIMAG):
9692 if (validate_arg (arg0, COMPLEX_TYPE)
9693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9694 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9695 break;
9697 CASE_FLT_FN (BUILT_IN_CCOS):
9698 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ false);
9700 CASE_FLT_FN (BUILT_IN_CCOSH):
9701 return fold_builtin_ccos(loc, arg0, type, fndecl, /*hyper=*/ true);
9703 #ifdef HAVE_mpc
9704 CASE_FLT_FN (BUILT_IN_CSIN):
9705 if (validate_arg (arg0, COMPLEX_TYPE)
9706 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9707 return do_mpc_arg1 (arg0, type, mpc_sin);
9708 break;
9710 CASE_FLT_FN (BUILT_IN_CSINH):
9711 if (validate_arg (arg0, COMPLEX_TYPE)
9712 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9713 return do_mpc_arg1 (arg0, type, mpc_sinh);
9714 break;
9716 CASE_FLT_FN (BUILT_IN_CTAN):
9717 if (validate_arg (arg0, COMPLEX_TYPE)
9718 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9719 return do_mpc_arg1 (arg0, type, mpc_tan);
9720 break;
9722 CASE_FLT_FN (BUILT_IN_CTANH):
9723 if (validate_arg (arg0, COMPLEX_TYPE)
9724 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9725 return do_mpc_arg1 (arg0, type, mpc_tanh);
9726 break;
9728 CASE_FLT_FN (BUILT_IN_CLOG):
9729 if (validate_arg (arg0, COMPLEX_TYPE)
9730 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9731 return do_mpc_arg1 (arg0, type, mpc_log);
9732 break;
9734 CASE_FLT_FN (BUILT_IN_CSQRT):
9735 if (validate_arg (arg0, COMPLEX_TYPE)
9736 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9737 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9738 break;
9740 #ifdef HAVE_mpc_arc
9741 CASE_FLT_FN (BUILT_IN_CASIN):
9742 if (validate_arg (arg0, COMPLEX_TYPE)
9743 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9744 return do_mpc_arg1 (arg0, type, mpc_asin);
9745 break;
9747 CASE_FLT_FN (BUILT_IN_CACOS):
9748 if (validate_arg (arg0, COMPLEX_TYPE)
9749 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9750 return do_mpc_arg1 (arg0, type, mpc_acos);
9751 break;
9753 CASE_FLT_FN (BUILT_IN_CATAN):
9754 if (validate_arg (arg0, COMPLEX_TYPE)
9755 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9756 return do_mpc_arg1 (arg0, type, mpc_atan);
9757 break;
9759 CASE_FLT_FN (BUILT_IN_CASINH):
9760 if (validate_arg (arg0, COMPLEX_TYPE)
9761 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9762 return do_mpc_arg1 (arg0, type, mpc_asinh);
9763 break;
9765 CASE_FLT_FN (BUILT_IN_CACOSH):
9766 if (validate_arg (arg0, COMPLEX_TYPE)
9767 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9768 return do_mpc_arg1 (arg0, type, mpc_acosh);
9769 break;
9771 CASE_FLT_FN (BUILT_IN_CATANH):
9772 if (validate_arg (arg0, COMPLEX_TYPE)
9773 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9774 return do_mpc_arg1 (arg0, type, mpc_atanh);
9775 break;
9776 #endif /* HAVE_mpc_arc */
9777 #endif /* HAVE_mpc */
9779 CASE_FLT_FN (BUILT_IN_CABS):
9780 return fold_builtin_cabs (loc, arg0, type, fndecl);
9782 CASE_FLT_FN (BUILT_IN_CARG):
9783 return fold_builtin_carg (loc, arg0, type);
9785 CASE_FLT_FN (BUILT_IN_SQRT):
9786 return fold_builtin_sqrt (loc, arg0, type);
9788 CASE_FLT_FN (BUILT_IN_CBRT):
9789 return fold_builtin_cbrt (loc, arg0, type);
9791 CASE_FLT_FN (BUILT_IN_ASIN):
9792 if (validate_arg (arg0, REAL_TYPE))
9793 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9794 &dconstm1, &dconst1, true);
9795 break;
9797 CASE_FLT_FN (BUILT_IN_ACOS):
9798 if (validate_arg (arg0, REAL_TYPE))
9799 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9800 &dconstm1, &dconst1, true);
9801 break;
9803 CASE_FLT_FN (BUILT_IN_ATAN):
9804 if (validate_arg (arg0, REAL_TYPE))
9805 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9806 break;
9808 CASE_FLT_FN (BUILT_IN_ASINH):
9809 if (validate_arg (arg0, REAL_TYPE))
9810 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9811 break;
9813 CASE_FLT_FN (BUILT_IN_ACOSH):
9814 if (validate_arg (arg0, REAL_TYPE))
9815 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9816 &dconst1, NULL, true);
9817 break;
9819 CASE_FLT_FN (BUILT_IN_ATANH):
9820 if (validate_arg (arg0, REAL_TYPE))
9821 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9822 &dconstm1, &dconst1, false);
9823 break;
9825 CASE_FLT_FN (BUILT_IN_SIN):
9826 if (validate_arg (arg0, REAL_TYPE))
9827 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9828 break;
9830 CASE_FLT_FN (BUILT_IN_COS):
9831 return fold_builtin_cos (loc, arg0, type, fndecl);
9833 CASE_FLT_FN (BUILT_IN_TAN):
9834 return fold_builtin_tan (arg0, type);
9836 CASE_FLT_FN (BUILT_IN_CEXP):
9837 return fold_builtin_cexp (loc, arg0, type);
9839 CASE_FLT_FN (BUILT_IN_CEXPI):
9840 if (validate_arg (arg0, REAL_TYPE))
9841 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9842 break;
9844 CASE_FLT_FN (BUILT_IN_SINH):
9845 if (validate_arg (arg0, REAL_TYPE))
9846 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9847 break;
9849 CASE_FLT_FN (BUILT_IN_COSH):
9850 return fold_builtin_cosh (loc, arg0, type, fndecl);
9852 CASE_FLT_FN (BUILT_IN_TANH):
9853 if (validate_arg (arg0, REAL_TYPE))
9854 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
9855 break;
9857 CASE_FLT_FN (BUILT_IN_ERF):
9858 if (validate_arg (arg0, REAL_TYPE))
9859 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
9860 break;
9862 CASE_FLT_FN (BUILT_IN_ERFC):
9863 if (validate_arg (arg0, REAL_TYPE))
9864 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
9865 break;
9867 CASE_FLT_FN (BUILT_IN_TGAMMA):
9868 if (validate_arg (arg0, REAL_TYPE))
9869 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
9870 break;
9872 CASE_FLT_FN (BUILT_IN_EXP):
9873 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
9875 CASE_FLT_FN (BUILT_IN_EXP2):
9876 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
9878 CASE_FLT_FN (BUILT_IN_EXP10):
9879 CASE_FLT_FN (BUILT_IN_POW10):
9880 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
9882 CASE_FLT_FN (BUILT_IN_EXPM1):
9883 if (validate_arg (arg0, REAL_TYPE))
9884 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
9885 break;
9887 CASE_FLT_FN (BUILT_IN_LOG):
9888 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
9890 CASE_FLT_FN (BUILT_IN_LOG2):
9891 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
9893 CASE_FLT_FN (BUILT_IN_LOG10):
9894 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
9896 CASE_FLT_FN (BUILT_IN_LOG1P):
9897 if (validate_arg (arg0, REAL_TYPE))
9898 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
9899 &dconstm1, NULL, false);
9900 break;
9902 CASE_FLT_FN (BUILT_IN_J0):
9903 if (validate_arg (arg0, REAL_TYPE))
9904 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9905 NULL, NULL, 0);
9906 break;
9908 CASE_FLT_FN (BUILT_IN_J1):
9909 if (validate_arg (arg0, REAL_TYPE))
9910 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9911 NULL, NULL, 0);
9912 break;
9914 CASE_FLT_FN (BUILT_IN_Y0):
9915 if (validate_arg (arg0, REAL_TYPE))
9916 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9917 &dconst0, NULL, false);
9918 break;
9920 CASE_FLT_FN (BUILT_IN_Y1):
9921 if (validate_arg (arg0, REAL_TYPE))
9922 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9923 &dconst0, NULL, false);
9924 break;
9926 CASE_FLT_FN (BUILT_IN_NAN):
9927 case BUILT_IN_NAND32:
9928 case BUILT_IN_NAND64:
9929 case BUILT_IN_NAND128:
9930 return fold_builtin_nan (arg0, type, true);
9932 CASE_FLT_FN (BUILT_IN_NANS):
9933 return fold_builtin_nan (arg0, type, false);
9935 CASE_FLT_FN (BUILT_IN_FLOOR):
9936 return fold_builtin_floor (loc, fndecl, arg0);
9938 CASE_FLT_FN (BUILT_IN_CEIL):
9939 return fold_builtin_ceil (loc, fndecl, arg0);
9941 CASE_FLT_FN (BUILT_IN_TRUNC):
9942 return fold_builtin_trunc (loc, fndecl, arg0);
9944 CASE_FLT_FN (BUILT_IN_ROUND):
9945 return fold_builtin_round (loc, fndecl, arg0);
9947 CASE_FLT_FN (BUILT_IN_NEARBYINT):
9948 CASE_FLT_FN (BUILT_IN_RINT):
9949 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
9951 CASE_FLT_FN (BUILT_IN_LCEIL):
9952 CASE_FLT_FN (BUILT_IN_LLCEIL):
9953 CASE_FLT_FN (BUILT_IN_LFLOOR):
9954 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9955 CASE_FLT_FN (BUILT_IN_LROUND):
9956 CASE_FLT_FN (BUILT_IN_LLROUND):
9957 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
9959 CASE_FLT_FN (BUILT_IN_LRINT):
9960 CASE_FLT_FN (BUILT_IN_LLRINT):
9961 return fold_fixed_mathfn (loc, fndecl, arg0);
9963 case BUILT_IN_BSWAP32:
9964 case BUILT_IN_BSWAP64:
9965 return fold_builtin_bswap (fndecl, arg0);
9967 CASE_INT_FN (BUILT_IN_FFS):
9968 CASE_INT_FN (BUILT_IN_CLZ):
9969 CASE_INT_FN (BUILT_IN_CTZ):
9970 CASE_INT_FN (BUILT_IN_POPCOUNT):
9971 CASE_INT_FN (BUILT_IN_PARITY):
9972 return fold_builtin_bitop (fndecl, arg0);
9974 CASE_FLT_FN (BUILT_IN_SIGNBIT):
9975 return fold_builtin_signbit (loc, arg0, type);
9977 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
9978 return fold_builtin_significand (loc, arg0, type);
9980 CASE_FLT_FN (BUILT_IN_ILOGB):
9981 CASE_FLT_FN (BUILT_IN_LOGB):
9982 return fold_builtin_logb (loc, arg0, type);
9984 case BUILT_IN_ISASCII:
9985 return fold_builtin_isascii (loc, arg0);
9987 case BUILT_IN_TOASCII:
9988 return fold_builtin_toascii (loc, arg0);
9990 case BUILT_IN_ISDIGIT:
9991 return fold_builtin_isdigit (loc, arg0);
9993 CASE_FLT_FN (BUILT_IN_FINITE):
9994 case BUILT_IN_FINITED32:
9995 case BUILT_IN_FINITED64:
9996 case BUILT_IN_FINITED128:
9997 case BUILT_IN_ISFINITE:
9999 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10000 if (ret)
10001 return ret;
10002 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10005 CASE_FLT_FN (BUILT_IN_ISINF):
10006 case BUILT_IN_ISINFD32:
10007 case BUILT_IN_ISINFD64:
10008 case BUILT_IN_ISINFD128:
10010 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10011 if (ret)
10012 return ret;
10013 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10016 case BUILT_IN_ISNORMAL:
10017 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10019 case BUILT_IN_ISINF_SIGN:
10020 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10022 CASE_FLT_FN (BUILT_IN_ISNAN):
10023 case BUILT_IN_ISNAND32:
10024 case BUILT_IN_ISNAND64:
10025 case BUILT_IN_ISNAND128:
10026 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10028 case BUILT_IN_PRINTF:
10029 case BUILT_IN_PRINTF_UNLOCKED:
10030 case BUILT_IN_VPRINTF:
10031 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10033 default:
10034 break;
10037 return NULL_TREE;
10041 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10042 IGNORE is true if the result of the function call is ignored. This
10043 function returns NULL_TREE if no simplification was possible. */
10045 static tree
10046 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10048 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10049 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10051 switch (fcode)
10053 CASE_FLT_FN (BUILT_IN_JN):
10054 if (validate_arg (arg0, INTEGER_TYPE)
10055 && validate_arg (arg1, REAL_TYPE))
10056 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10057 break;
10059 CASE_FLT_FN (BUILT_IN_YN):
10060 if (validate_arg (arg0, INTEGER_TYPE)
10061 && validate_arg (arg1, REAL_TYPE))
10062 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10063 &dconst0, false);
10064 break;
10066 CASE_FLT_FN (BUILT_IN_DREM):
10067 CASE_FLT_FN (BUILT_IN_REMAINDER):
10068 if (validate_arg (arg0, REAL_TYPE)
10069 && validate_arg(arg1, REAL_TYPE))
10070 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10071 break;
10073 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10074 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10075 if (validate_arg (arg0, REAL_TYPE)
10076 && validate_arg(arg1, POINTER_TYPE))
10077 return do_mpfr_lgamma_r (arg0, arg1, type);
10078 break;
10080 CASE_FLT_FN (BUILT_IN_ATAN2):
10081 if (validate_arg (arg0, REAL_TYPE)
10082 && validate_arg(arg1, REAL_TYPE))
10083 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10084 break;
10086 CASE_FLT_FN (BUILT_IN_FDIM):
10087 if (validate_arg (arg0, REAL_TYPE)
10088 && validate_arg(arg1, REAL_TYPE))
10089 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10090 break;
10092 CASE_FLT_FN (BUILT_IN_HYPOT):
10093 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10095 #ifdef HAVE_mpc_pow
10096 CASE_FLT_FN (BUILT_IN_CPOW):
10097 if (validate_arg (arg0, COMPLEX_TYPE)
10098 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10099 && validate_arg (arg1, COMPLEX_TYPE)
10100 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10101 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10102 break;
10103 #endif
10105 CASE_FLT_FN (BUILT_IN_LDEXP):
10106 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10107 CASE_FLT_FN (BUILT_IN_SCALBN):
10108 CASE_FLT_FN (BUILT_IN_SCALBLN):
10109 return fold_builtin_load_exponent (loc, arg0, arg1,
10110 type, /*ldexp=*/false);
10112 CASE_FLT_FN (BUILT_IN_FREXP):
10113 return fold_builtin_frexp (loc, arg0, arg1, type);
10115 CASE_FLT_FN (BUILT_IN_MODF):
10116 return fold_builtin_modf (loc, arg0, arg1, type);
10118 case BUILT_IN_BZERO:
10119 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10121 case BUILT_IN_FPUTS:
10122 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10124 case BUILT_IN_FPUTS_UNLOCKED:
10125 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10127 case BUILT_IN_STRSTR:
10128 return fold_builtin_strstr (loc, arg0, arg1, type);
10130 case BUILT_IN_STRCAT:
10131 return fold_builtin_strcat (loc, arg0, arg1);
10133 case BUILT_IN_STRSPN:
10134 return fold_builtin_strspn (loc, arg0, arg1);
10136 case BUILT_IN_STRCSPN:
10137 return fold_builtin_strcspn (loc, arg0, arg1);
10139 case BUILT_IN_STRCHR:
10140 case BUILT_IN_INDEX:
10141 return fold_builtin_strchr (loc, arg0, arg1, type);
10143 case BUILT_IN_STRRCHR:
10144 case BUILT_IN_RINDEX:
10145 return fold_builtin_strrchr (loc, arg0, arg1, type);
10147 case BUILT_IN_STRCPY:
10148 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10150 case BUILT_IN_STPCPY:
10151 if (ignore)
10153 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
10154 if (!fn)
10155 break;
10157 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10159 else
10160 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10161 break;
10163 case BUILT_IN_STRCMP:
10164 return fold_builtin_strcmp (loc, arg0, arg1);
10166 case BUILT_IN_STRPBRK:
10167 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10169 case BUILT_IN_EXPECT:
10170 return fold_builtin_expect (loc, arg0, arg1);
10172 CASE_FLT_FN (BUILT_IN_POW):
10173 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10175 CASE_FLT_FN (BUILT_IN_POWI):
10176 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10178 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10179 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10181 CASE_FLT_FN (BUILT_IN_FMIN):
10182 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10184 CASE_FLT_FN (BUILT_IN_FMAX):
10185 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10187 case BUILT_IN_ISGREATER:
10188 return fold_builtin_unordered_cmp (loc, fndecl,
10189 arg0, arg1, UNLE_EXPR, LE_EXPR);
10190 case BUILT_IN_ISGREATEREQUAL:
10191 return fold_builtin_unordered_cmp (loc, fndecl,
10192 arg0, arg1, UNLT_EXPR, LT_EXPR);
10193 case BUILT_IN_ISLESS:
10194 return fold_builtin_unordered_cmp (loc, fndecl,
10195 arg0, arg1, UNGE_EXPR, GE_EXPR);
10196 case BUILT_IN_ISLESSEQUAL:
10197 return fold_builtin_unordered_cmp (loc, fndecl,
10198 arg0, arg1, UNGT_EXPR, GT_EXPR);
10199 case BUILT_IN_ISLESSGREATER:
10200 return fold_builtin_unordered_cmp (loc, fndecl,
10201 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10202 case BUILT_IN_ISUNORDERED:
10203 return fold_builtin_unordered_cmp (loc, fndecl,
10204 arg0, arg1, UNORDERED_EXPR,
10205 NOP_EXPR);
10207 /* We do the folding for va_start in the expander. */
10208 case BUILT_IN_VA_START:
10209 break;
10211 case BUILT_IN_SPRINTF:
10212 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10214 case BUILT_IN_OBJECT_SIZE:
10215 return fold_builtin_object_size (arg0, arg1);
10217 case BUILT_IN_PRINTF:
10218 case BUILT_IN_PRINTF_UNLOCKED:
10219 case BUILT_IN_VPRINTF:
10220 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10222 case BUILT_IN_PRINTF_CHK:
10223 case BUILT_IN_VPRINTF_CHK:
10224 if (!validate_arg (arg0, INTEGER_TYPE)
10225 || TREE_SIDE_EFFECTS (arg0))
10226 return NULL_TREE;
10227 else
10228 return fold_builtin_printf (loc, fndecl,
10229 arg1, NULL_TREE, ignore, fcode);
10230 break;
10232 case BUILT_IN_FPRINTF:
10233 case BUILT_IN_FPRINTF_UNLOCKED:
10234 case BUILT_IN_VFPRINTF:
10235 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10236 ignore, fcode);
10238 default:
10239 break;
10241 return NULL_TREE;
10244 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10245 and ARG2. IGNORE is true if the result of the function call is ignored.
10246 This function returns NULL_TREE if no simplification was possible. */
10248 static tree
10249 fold_builtin_3 (location_t loc, tree fndecl,
10250 tree arg0, tree arg1, tree arg2, bool ignore)
10252 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10253 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10254 switch (fcode)
10257 CASE_FLT_FN (BUILT_IN_SINCOS):
10258 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10260 CASE_FLT_FN (BUILT_IN_FMA):
10261 if (validate_arg (arg0, REAL_TYPE)
10262 && validate_arg(arg1, REAL_TYPE)
10263 && validate_arg(arg2, REAL_TYPE))
10264 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10265 break;
10267 CASE_FLT_FN (BUILT_IN_REMQUO):
10268 if (validate_arg (arg0, REAL_TYPE)
10269 && validate_arg(arg1, REAL_TYPE)
10270 && validate_arg(arg2, POINTER_TYPE))
10271 return do_mpfr_remquo (arg0, arg1, arg2);
10272 break;
10274 case BUILT_IN_MEMSET:
10275 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10277 case BUILT_IN_BCOPY:
10278 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10279 void_type_node, true, /*endp=*/3);
10281 case BUILT_IN_MEMCPY:
10282 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10283 type, ignore, /*endp=*/0);
10285 case BUILT_IN_MEMPCPY:
10286 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10287 type, ignore, /*endp=*/1);
10289 case BUILT_IN_MEMMOVE:
10290 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10291 type, ignore, /*endp=*/3);
10293 case BUILT_IN_STRNCAT:
10294 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10296 case BUILT_IN_STRNCPY:
10297 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10299 case BUILT_IN_STRNCMP:
10300 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10302 case BUILT_IN_MEMCHR:
10303 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10305 case BUILT_IN_BCMP:
10306 case BUILT_IN_MEMCMP:
10307 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
10309 case BUILT_IN_SPRINTF:
10310 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
10312 case BUILT_IN_STRCPY_CHK:
10313 case BUILT_IN_STPCPY_CHK:
10314 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
10315 ignore, fcode);
10317 case BUILT_IN_STRCAT_CHK:
10318 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
10320 case BUILT_IN_PRINTF_CHK:
10321 case BUILT_IN_VPRINTF_CHK:
10322 if (!validate_arg (arg0, INTEGER_TYPE)
10323 || TREE_SIDE_EFFECTS (arg0))
10324 return NULL_TREE;
10325 else
10326 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
10327 break;
10329 case BUILT_IN_FPRINTF:
10330 case BUILT_IN_FPRINTF_UNLOCKED:
10331 case BUILT_IN_VFPRINTF:
10332 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
10333 ignore, fcode);
10335 case BUILT_IN_FPRINTF_CHK:
10336 case BUILT_IN_VFPRINTF_CHK:
10337 if (!validate_arg (arg1, INTEGER_TYPE)
10338 || TREE_SIDE_EFFECTS (arg1))
10339 return NULL_TREE;
10340 else
10341 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
10342 ignore, fcode);
10344 default:
10345 break;
10347 return NULL_TREE;
10350 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10351 ARG2, and ARG3. IGNORE is true if the result of the function call is
10352 ignored. This function returns NULL_TREE if no simplification was
10353 possible. */
10355 static tree
10356 fold_builtin_4 (location_t loc, tree fndecl,
10357 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
10359 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10361 switch (fcode)
10363 case BUILT_IN_MEMCPY_CHK:
10364 case BUILT_IN_MEMPCPY_CHK:
10365 case BUILT_IN_MEMMOVE_CHK:
10366 case BUILT_IN_MEMSET_CHK:
10367 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
10368 NULL_TREE, ignore,
10369 DECL_FUNCTION_CODE (fndecl));
10371 case BUILT_IN_STRNCPY_CHK:
10372 return fold_builtin_strncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE);
10374 case BUILT_IN_STRNCAT_CHK:
10375 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
10377 case BUILT_IN_FPRINTF_CHK:
10378 case BUILT_IN_VFPRINTF_CHK:
10379 if (!validate_arg (arg1, INTEGER_TYPE)
10380 || TREE_SIDE_EFFECTS (arg1))
10381 return NULL_TREE;
10382 else
10383 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
10384 ignore, fcode);
10385 break;
10387 default:
10388 break;
10390 return NULL_TREE;
10393 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10394 arguments, where NARGS <= 4. IGNORE is true if the result of the
10395 function call is ignored. This function returns NULL_TREE if no
10396 simplification was possible. Note that this only folds builtins with
10397 fixed argument patterns. Foldings that do varargs-to-varargs
10398 transformations, or that match calls with more than 4 arguments,
10399 need to be handled with fold_builtin_varargs instead. */
10401 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10403 static tree
10404 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
10406 tree ret = NULL_TREE;
10408 switch (nargs)
10410 case 0:
10411 ret = fold_builtin_0 (loc, fndecl, ignore);
10412 break;
10413 case 1:
10414 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
10415 break;
10416 case 2:
10417 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
10418 break;
10419 case 3:
10420 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
10421 break;
10422 case 4:
10423 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
10424 ignore);
10425 break;
10426 default:
10427 break;
10429 if (ret)
10431 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10432 SET_EXPR_LOCATION (ret, loc);
10433 TREE_NO_WARNING (ret) = 1;
10434 return ret;
10436 return NULL_TREE;
10439 /* Builtins with folding operations that operate on "..." arguments
10440 need special handling; we need to store the arguments in a convenient
10441 data structure before attempting any folding. Fortunately there are
10442 only a few builtins that fall into this category. FNDECL is the
10443 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10444 result of the function call is ignored. */
10446 static tree
10447 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
10448 bool ignore ATTRIBUTE_UNUSED)
10450 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10451 tree ret = NULL_TREE;
10453 switch (fcode)
10455 case BUILT_IN_SPRINTF_CHK:
10456 case BUILT_IN_VSPRINTF_CHK:
10457 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
10458 break;
10460 case BUILT_IN_SNPRINTF_CHK:
10461 case BUILT_IN_VSNPRINTF_CHK:
10462 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
10463 break;
10465 case BUILT_IN_FPCLASSIFY:
10466 ret = fold_builtin_fpclassify (loc, exp);
10467 break;
10469 default:
10470 break;
10472 if (ret)
10474 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10475 SET_EXPR_LOCATION (ret, loc);
10476 TREE_NO_WARNING (ret) = 1;
10477 return ret;
10479 return NULL_TREE;
10482 /* Return true if FNDECL shouldn't be folded right now.
10483 If a built-in function has an inline attribute always_inline
10484 wrapper, defer folding it after always_inline functions have
10485 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10486 might not be performed. */
10488 static bool
10489 avoid_folding_inline_builtin (tree fndecl)
10491 return (DECL_DECLARED_INLINE_P (fndecl)
10492 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10493 && cfun
10494 && !cfun->always_inline_functions_inlined
10495 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Defer folding of always_inline fortification wrappers; see
	 avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded by the target hook.  */
      /* FIXME: Don't use a list in this interface.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
      else
	{
	  /* Fixed-arity folders first, then the varargs folders.  */
	  if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      tree *args = CALL_EXPR_ARGP (exp);
	      ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	    }
	  if (!ret)
	    ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
10552 /* Conveniently construct a function call expression. FNDECL names the
10553 function to be called and ARGLIST is a TREE_LIST of arguments. */
10555 tree
10556 build_function_call_expr (location_t loc, tree fndecl, tree arglist)
10558 tree fntype = TREE_TYPE (fndecl);
10559 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10560 int n = list_length (arglist);
10561 tree *argarray = (tree *) alloca (n * sizeof (tree));
10562 int i;
10564 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10565 argarray[i] = TREE_VALUE (arglist);
10566 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10569 /* Conveniently construct a function call expression. FNDECL names the
10570 function to be called, N is the number of arguments, and the "..."
10571 parameters are the argument expressions. */
10573 tree
10574 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10576 va_list ap;
10577 tree fntype = TREE_TYPE (fndecl);
10578 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10579 tree *argarray = (tree *) alloca (n * sizeof (tree));
10580 int i;
10582 va_start (ap, n);
10583 for (i = 0; i < n; i++)
10584 argarray[i] = va_arg (ap, tree);
10585 va_end (ap);
10586 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

tree
fold_builtin_call_array (location_t loc, tree type,
			 tree fn,
			 int n,
			 tree *argarray)
{
  tree ret = NULL_TREE;
  int i;
  tree exp;

  if (TREE_CODE (fn) == ADDR_EXPR)
    {
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN (fndecl))
	{
	  /* If last argument is __builtin_va_arg_pack (), arguments to this
	     function are not finalized yet.  Defer folding until they are.  */
	  if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	    {
	      tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	      if (fndecl2
		  && TREE_CODE (fndecl2) == FUNCTION_DECL
		  && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
		  && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
		return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  /* Defer folding of always_inline fortification wrappers.  */
	  if (avoid_folding_inline_builtin (fndecl))
	    return build_call_array_loc (loc, type, fn, n, argarray);
	  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	    {
	      /* The target hook still takes a TREE_LIST; build one from
		 the array, last argument first.  */
	      tree arglist = NULL_TREE;
	      for (i = n - 1; i >= 0; i--)
		arglist = tree_cons (NULL_TREE, argarray[i], arglist);
	      ret = targetm.fold_builtin (fndecl, arglist, false);
	      if (ret)
		return ret;
	      return build_call_array_loc (loc, type, fn, n, argarray);
	    }
	  else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
	    {
	      /* First try the transformations that don't require consing up
		 an exp.  */
	      ret = fold_builtin_n (loc, fndecl, argarray, n, false);
	      if (ret)
		return ret;
	    }

	  /* If we got this far, we need to build an exp.  */
	  exp = build_call_array_loc (loc, type, fn, n, argarray);
	  ret = fold_builtin_varargs (loc, fndecl, exp, false);
	  return ret ? ret : exp;
	}
    }

  return build_call_array_loc (loc, type, fn, n, argarray);
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *buffer;

  if (n > 0)
    {
      int i, j;
      va_list ap;

      /* New arguments first, then the surviving tail of EXP's
	 arguments.  */
      buffer = XALLOCAVEC (tree, nargs);
      va_start (ap, n);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (ap, tree);
      va_end (ap);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = CALL_EXPR_ARG (exp, j);
    }
  else
    /* No new arguments: point directly into EXP's argument vector,
       skipping the first SKIP entries; no copy is needed.  */
    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array_loc (loc, TREE_TYPE (exp), fn, nargs, buffer));
}
10683 /* Validate a single argument ARG against a tree code CODE representing
10684 a type. */
10686 static bool
10687 validate_arg (const_tree arg, enum tree_code code)
10689 if (!arg)
10690 return false;
10691 else if (code == POINTER_TYPE)
10692 return POINTER_TYPE_P (TREE_TYPE (arg));
10693 else if (code == INTEGER_TYPE)
10694 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10695 return code == TREE_CODE (TREE_TYPE (arg));
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const_gimple call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* Each variadic specifier is a tree_code (passed as int).  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      /* Each variadic specifier is a tree_code (passed as int).  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  /* Returning NULL_RTX tells the caller no expansion was done.  */
  return NULL_RTX;
}
10816 /* Returns true is EXP represents data that would potentially reside
10817 in a readonly section. */
10819 static bool
10820 readonly_data_expr (tree exp)
10822 STRIP_NOPS (exp);
10824 if (TREE_CODE (exp) != ADDR_EXPR)
10825 return false;
10827 exp = get_base_address (TREE_OPERAND (exp, 0));
10828 if (!exp)
10829 return false;
10831 /* Make sure we call decl_readonly_section only for trees it
10832 can handle (since it returns true for everything it doesn't
10833 understand). */
10834 if (TREE_CODE (exp) == STRING_CST
10835 || TREE_CODE (exp) == CONSTRUCTOR
10836 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10837 return decl_readonly_section (exp, 0);
10838 else
10839 return false;
10842 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10843 to the call, and TYPE is its return type.
10845 Return NULL_TREE if no simplification was possible, otherwise return the
10846 simplified form of the call as a tree.
10848 The simplified form may be a constant or other expression which
10849 computes the same value, but in a more efficient manner (including
10850 calls to other builtin functions).
10852 The call may contain arguments which need to be evaluated, but
10853 which are not useful to determine the result of the call. In
10854 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10855 COMPOUND_EXPR will be an argument which must be evaluated.
10856 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10857 COMPOUND_EXPR in the chain will contain the tree for the simplified
10858 form of the builtin function call. */
10860 static tree
10861 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
10863 if (!validate_arg (s1, POINTER_TYPE)
10864 || !validate_arg (s2, POINTER_TYPE))
10865 return NULL_TREE;
10866 else
10868 tree fn;
10869 const char *p1, *p2;
10871 p2 = c_getstr (s2);
10872 if (p2 == NULL)
10873 return NULL_TREE;
10875 p1 = c_getstr (s1);
10876 if (p1 != NULL)
10878 const char *r = strstr (p1, p2);
10879 tree tem;
10881 if (r == NULL)
10882 return build_int_cst (TREE_TYPE (s1), 0);
10884 /* Return an offset into the constant string argument. */
10885 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
10886 s1, size_int (r - p1));
10887 return fold_convert_loc (loc, type, tem);
10890 /* The argument is const char *, and the result is char *, so we need
10891 a type conversion here to avoid a warning. */
10892 if (p2[0] == '\0')
10893 return fold_convert_loc (loc, type, s1);
10895 if (p2[1] != '\0')
10896 return NULL_TREE;
10898 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10899 if (!fn)
10900 return NULL_TREE;
10902 /* New argument list transforming strstr(s1, s2) to
10903 strchr(s1, s2[0]). */
10904 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1;

      /* The character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the INTEGER_CST to a host char; fails if it does
	     not fit in the target's char.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      /* The character must be a compile-time constant.  */
      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  /* Convert the INTEGER_CST to a host char; fails if it does
	     not fit in the target's char.  */
	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
				 s1, size_int (r - p1));
	  return fold_convert_loc (loc, type, tem);
	}

      /* Searching for '\0' finds the terminator, so strrchr degenerates
	 to strchr.  */
      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
11028 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11029 to the call, and TYPE is its return type.
11031 Return NULL_TREE if no simplification was possible, otherwise return the
11032 simplified form of the call as a tree.
11034 The simplified form may be a constant or other expression which
11035 computes the same value, but in a more efficient manner (including
11036 calls to other builtin functions).
11038 The call may contain arguments which need to be evaluated, but
11039 which are not useful to determine the result of the call. In
11040 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11041 COMPOUND_EXPR will be an argument which must be evaluated.
11042 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11043 COMPOUND_EXPR in the chain will contain the tree for the simplified
11044 form of the builtin function call. */
11046 static tree
11047 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11049 if (!validate_arg (s1, POINTER_TYPE)
11050 || !validate_arg (s2, POINTER_TYPE))
11051 return NULL_TREE;
11052 else
11054 tree fn;
11055 const char *p1, *p2;
11057 p2 = c_getstr (s2);
11058 if (p2 == NULL)
11059 return NULL_TREE;
11061 p1 = c_getstr (s1);
11062 if (p1 != NULL)
11064 const char *r = strpbrk (p1, p2);
11065 tree tem;
11067 if (r == NULL)
11068 return build_int_cst (TREE_TYPE (s1), 0);
11070 /* Return an offset into the constant string argument. */
11071 tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
11072 s1, size_int (r - p1));
11073 return fold_convert_loc (loc, type, tem);
11076 if (p2[0] == '\0')
11077 /* strpbrk(x, "") == NULL.
11078 Evaluate and ignore s1 in case it had side-effects. */
11079 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11081 if (p2[1] != '\0')
11082 return NULL_TREE; /* Really call strpbrk. */
11084 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11085 if (!fn)
11086 return NULL_TREE;
11088 /* New argument list transforming strpbrk(s1, s2) to
11089 strchr(s1, s2[0]). */
11090 return build_call_expr_loc (loc, fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
	return dst;

      if (optimize_insn_for_speed_p ())
	{
	  /* See if we can store by pieces into (dst + strlen(dst)).  */
	  tree newdst, call;
	  tree strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
	  tree strcpy_fn = implicit_built_in_decls[BUILT_IN_STRCPY];

	  if (!strlen_fn || !strcpy_fn)
	    return NULL_TREE;

	  /* If we don't have a movstr we don't want to emit an strcpy
	     call.  We have to do that if the length of the source string
	     isn't computable (in that case we can use memcpy probably
	     later expanding to a sequence of mov instructions).  If we
	     have movstr instructions we can emit strcpy calls.  */
	  if (!HAVE_movstr)
	    {
	      tree len = c_strlen (src, 1);
	      if (! len || TREE_SIDE_EFFECTS (len))
		return NULL_TREE;
	    }

	  /* Stabilize the argument list.  */
	  dst = builtin_save_expr (dst);

	  /* Create strlen (dst).  */
	  newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
	  /* Create (dst p+ strlen (dst)).  */

	  newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dst), dst, newdst);
	  newdst = builtin_save_expr (newdst);

	  /* strcpy (dst + strlen (dst), src), then yield DST as the
	     overall value via a COMPOUND_EXPR.  */
	  call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
	  return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
	}
      return NULL_TREE;
    }
}
/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
{
  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
	 length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
	return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
	 length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
	  && compare_tree_int (len, strlen (p)) >= 0)
	{
	  tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 2, dst, src);
	}
      return NULL_TREE;
    }
}
11218 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11219 to the call.
11221 Return NULL_TREE if no simplification was possible, otherwise return the
11222 simplified form of the call as a tree.
11224 The simplified form may be a constant or other expression which
11225 computes the same value, but in a more efficient manner (including
11226 calls to other builtin functions).
11228 The call may contain arguments which need to be evaluated, but
11229 which are not useful to determine the result of the call. In
11230 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11231 COMPOUND_EXPR will be an argument which must be evaluated.
11232 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11233 COMPOUND_EXPR in the chain will contain the tree for the simplified
11234 form of the builtin function call. */
11236 static tree
11237 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11239 if (!validate_arg (s1, POINTER_TYPE)
11240 || !validate_arg (s2, POINTER_TYPE))
11241 return NULL_TREE;
11242 else
11244 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11246 /* If both arguments are constants, evaluate at compile-time. */
11247 if (p1 && p2)
11249 const size_t r = strspn (p1, p2);
11250 return size_int (r);
11253 /* If either argument is "", return NULL_TREE. */
11254 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11255 /* Evaluate and ignore both arguments in case either one has
11256 side-effects. */
11257 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11258 s1, s2);
11259 return NULL_TREE;
11263 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11264 to the call.
11266 Return NULL_TREE if no simplification was possible, otherwise return the
11267 simplified form of the call as a tree.
11269 The simplified form may be a constant or other expression which
11270 computes the same value, but in a more efficient manner (including
11271 calls to other builtin functions).
11273 The call may contain arguments which need to be evaluated, but
11274 which are not useful to determine the result of the call. In
11275 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11276 COMPOUND_EXPR will be an argument which must be evaluated.
11277 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11278 COMPOUND_EXPR in the chain will contain the tree for the simplified
11279 form of the builtin function call. */
11281 static tree
11282 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11284 if (!validate_arg (s1, POINTER_TYPE)
11285 || !validate_arg (s2, POINTER_TYPE))
11286 return NULL_TREE;
11287 else
11289 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11291 /* If both arguments are constants, evaluate at compile-time. */
11292 if (p1 && p2)
11294 const size_t r = strcspn (p1, p2);
11295 return size_int (r);
11298 /* If the first argument is "", return NULL_TREE. */
11299 if (p1 && *p1 == '\0')
11301 /* Evaluate and ignore argument s2 in case it has
11302 side-effects. */
11303 return omit_one_operand_loc (loc, size_type_node,
11304 size_zero_node, s2);
11307 /* If the second argument is "", return __builtin_strlen(s1). */
11308 if (p2 && *p2 == '\0')
11310 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11312 /* If the replacement _DECL isn't initialized, don't do the
11313 transformation. */
11314 if (!fn)
11315 return NULL_TREE;
11317 return build_call_expr_loc (loc, fn, 1, s1);
11319 return NULL_TREE;
11323 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11324 to the call. IGNORE is true if the value returned
11325 by the builtin will be ignored. UNLOCKED is true is true if this
11326 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11327 the known length of the string. Return NULL_TREE if no simplification
11328 was possible. */
11330 tree
11331 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11332 bool ignore, bool unlocked, tree len)
11334 /* If we're using an unlocked function, assume the other unlocked
11335 functions exist explicitly. */
11336 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11337 : implicit_built_in_decls[BUILT_IN_FPUTC];
11338 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11339 : implicit_built_in_decls[BUILT_IN_FWRITE];
11341 /* If the return value is used, don't do the transformation. */
11342 if (!ignore)
11343 return NULL_TREE;
11345 /* Verify the arguments in the original call. */
11346 if (!validate_arg (arg0, POINTER_TYPE)
11347 || !validate_arg (arg1, POINTER_TYPE))
11348 return NULL_TREE;
11350 if (! len)
11351 len = c_strlen (arg0, 0);
11353 /* Get the length of the string passed to fputs. If the length
11354 can't be determined, punt. */
11355 if (!len
11356 || TREE_CODE (len) != INTEGER_CST)
11357 return NULL_TREE;
11359 switch (compare_tree_int (len, 1))
11361 case -1: /* length is 0, delete the call entirely . */
11362 return omit_one_operand_loc (loc, integer_type_node,
11363 integer_zero_node, arg1);;
11365 case 0: /* length is 1, call fputc. */
11367 const char *p = c_getstr (arg0);
11369 if (p != NULL)
11371 if (fn_fputc)
11372 return build_call_expr_loc (loc, fn_fputc, 2,
11373 build_int_cst (NULL_TREE, p[0]), arg1);
11374 else
11375 return NULL_TREE;
11378 /* FALLTHROUGH */
11379 case 1: /* length is greater than 1, call fwrite. */
11381 /* If optimizing for size keep fputs. */
11382 if (optimize_function_for_size_p (cfun))
11383 return NULL_TREE;
11384 /* New argument list transforming fputs(string, stream) to
11385 fwrite(string, 1, len, stream). */
11386 if (fn_fwrite)
11387 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
11388 size_one_node, len, arg1);
11389 else
11390 return NULL_TREE;
11392 default:
11393 gcc_unreachable ();
11395 return NULL_TREE;
11398 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11399 produced. False otherwise. This is done so that we don't output the error
11400 or warning twice or three times. */
11402 bool
11403 fold_builtin_next_arg (tree exp, bool va_start_p)
11405 tree fntype = TREE_TYPE (current_function_decl);
11406 int nargs = call_expr_nargs (exp);
11407 tree arg;
11409 if (TYPE_ARG_TYPES (fntype) == 0
11410 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11411 == void_type_node))
11413 error ("%<va_start%> used in function with fixed args");
11414 return true;
11417 if (va_start_p)
11419 if (va_start_p && (nargs != 2))
11421 error ("wrong number of arguments to function %<va_start%>");
11422 return true;
11424 arg = CALL_EXPR_ARG (exp, 1);
11426 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11427 when we checked the arguments and if needed issued a warning. */
11428 else
11430 if (nargs == 0)
11432 /* Evidently an out of date version of <stdarg.h>; can't validate
11433 va_start's second argument, but can still work as intended. */
11434 warning (0, "%<__builtin_next_arg%> called without an argument");
11435 return true;
11437 else if (nargs > 1)
11439 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11440 return true;
11442 arg = CALL_EXPR_ARG (exp, 0);
11445 if (TREE_CODE (arg) == SSA_NAME)
11446 arg = SSA_NAME_VAR (arg);
11448 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11449 or __builtin_next_arg (0) the first time we see it, after checking
11450 the arguments and if needed issuing a warning. */
11451 if (!integer_zerop (arg))
11453 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11455 /* Strip off all nops for the sake of the comparison. This
11456 is not quite the same as STRIP_NOPS. It does more.
11457 We must also strip off INDIRECT_EXPR for C++ reference
11458 parameters. */
11459 while (CONVERT_EXPR_P (arg)
11460 || TREE_CODE (arg) == INDIRECT_REF)
11461 arg = TREE_OPERAND (arg, 0);
11462 if (arg != last_parm)
11464 /* FIXME: Sometimes with the tree optimizers we can get the
11465 not the last argument even though the user used the last
11466 argument. We just warn and set the arg to be the last
11467 argument so that we will get wrong-code because of
11468 it. */
11469 warning (0, "second parameter of %<va_start%> not last named argument");
11472 /* Undefined by C99 7.15.1.4p4 (va_start):
11473 "If the parameter parmN is declared with the register storage
11474 class, with a function or array type, or with a type that is
11475 not compatible with the type that results after application of
11476 the default argument promotions, the behavior is undefined."
11478 else if (DECL_REGISTER (arg))
11479 warning (0, "undefined behaviour when second parameter of "
11480 "%<va_start%> is declared with %<register%> storage");
11482 /* We want to verify the second parameter just once before the tree
11483 optimizers are run and then avoid keeping it in the tree,
11484 as otherwise we could warn even for correct code like:
11485 void foo (int i, ...)
11486 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11487 if (va_start_p)
11488 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11489 else
11490 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11492 return false;
11496 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11497 ORIG may be null if this is a 2-argument call. We don't attempt to
11498 simplify calls with more than 3 arguments.
11500 Return NULL_TREE if no simplification was possible, otherwise return the
11501 simplified form of the call as a tree. If IGNORED is true, it means that
11502 the caller does not use the returned value of the function. */
11504 static tree
11505 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
11506 tree orig, int ignored)
11508 tree call, retval;
11509 const char *fmt_str = NULL;
11511 /* Verify the required arguments in the original call. We deal with two
11512 types of sprintf() calls: 'sprintf (str, fmt)' and
11513 'sprintf (dest, "%s", orig)'. */
11514 if (!validate_arg (dest, POINTER_TYPE)
11515 || !validate_arg (fmt, POINTER_TYPE))
11516 return NULL_TREE;
11517 if (orig && !validate_arg (orig, POINTER_TYPE))
11518 return NULL_TREE;
11520 /* Check whether the format is a literal string constant. */
11521 fmt_str = c_getstr (fmt);
11522 if (fmt_str == NULL)
11523 return NULL_TREE;
11525 call = NULL_TREE;
11526 retval = NULL_TREE;
11528 if (!init_target_chars ())
11529 return NULL_TREE;
11531 /* If the format doesn't contain % args or %%, use strcpy. */
11532 if (strchr (fmt_str, target_percent) == NULL)
11534 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11536 if (!fn)
11537 return NULL_TREE;
11539 /* Don't optimize sprintf (buf, "abc", ptr++). */
11540 if (orig)
11541 return NULL_TREE;
11543 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11544 'format' is known to contain no % formats. */
11545 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
11546 if (!ignored)
11547 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11550 /* If the format is "%s", use strcpy if the result isn't used. */
11551 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11553 tree fn;
11554 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11556 if (!fn)
11557 return NULL_TREE;
11559 /* Don't crash on sprintf (str1, "%s"). */
11560 if (!orig)
11561 return NULL_TREE;
11563 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11564 if (!ignored)
11566 retval = c_strlen (orig, 1);
11567 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11568 return NULL_TREE;
11570 call = build_call_expr_loc (loc, fn, 2, dest, orig);
11573 if (call && retval)
11575 retval = fold_convert_loc
11576 (loc, TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11577 retval);
11578 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11580 else
11581 return call;
11584 /* Expand a call EXP to __builtin_object_size. */
11587 expand_builtin_object_size (tree exp)
11589 tree ost;
11590 int object_size_type;
11591 tree fndecl = get_callee_fndecl (exp);
11593 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11595 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11596 exp, fndecl);
11597 expand_builtin_trap ();
11598 return const0_rtx;
11601 ost = CALL_EXPR_ARG (exp, 1);
11602 STRIP_NOPS (ost);
11604 if (TREE_CODE (ost) != INTEGER_CST
11605 || tree_int_cst_sgn (ost) < 0
11606 || compare_tree_int (ost, 3) > 0)
11608 error ("%Klast argument of %D is not integer constant between 0 and 3",
11609 exp, fndecl);
11610 expand_builtin_trap ();
11611 return const0_rtx;
11614 object_size_type = tree_low_cst (ost, 0);
11616 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11619 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11620 FCODE is the BUILT_IN_* to use.
11621 Return NULL_RTX if we failed; the caller should emit a normal call,
11622 otherwise try to get the result in TARGET, if convenient (and in
11623 mode MODE if that's convenient). */
11625 static rtx
11626 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11627 enum built_in_function fcode)
11629 tree dest, src, len, size;
11631 if (!validate_arglist (exp,
11632 POINTER_TYPE,
11633 fcode == BUILT_IN_MEMSET_CHK
11634 ? INTEGER_TYPE : POINTER_TYPE,
11635 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11636 return NULL_RTX;
11638 dest = CALL_EXPR_ARG (exp, 0);
11639 src = CALL_EXPR_ARG (exp, 1);
11640 len = CALL_EXPR_ARG (exp, 2);
11641 size = CALL_EXPR_ARG (exp, 3);
11643 if (! host_integerp (size, 1))
11644 return NULL_RTX;
11646 if (host_integerp (len, 1) || integer_all_onesp (size))
11648 tree fn;
11650 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11652 warning_at (tree_nonartificial_location (exp),
11653 0, "%Kcall to %D will always overflow destination buffer",
11654 exp, get_callee_fndecl (exp));
11655 return NULL_RTX;
11658 fn = NULL_TREE;
11659 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11660 mem{cpy,pcpy,move,set} is available. */
11661 switch (fcode)
11663 case BUILT_IN_MEMCPY_CHK:
11664 fn = built_in_decls[BUILT_IN_MEMCPY];
11665 break;
11666 case BUILT_IN_MEMPCPY_CHK:
11667 fn = built_in_decls[BUILT_IN_MEMPCPY];
11668 break;
11669 case BUILT_IN_MEMMOVE_CHK:
11670 fn = built_in_decls[BUILT_IN_MEMMOVE];
11671 break;
11672 case BUILT_IN_MEMSET_CHK:
11673 fn = built_in_decls[BUILT_IN_MEMSET];
11674 break;
11675 default:
11676 break;
11679 if (! fn)
11680 return NULL_RTX;
11682 fn = build_call_nofold (fn, 3, dest, src, len);
11683 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11684 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11685 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11687 else if (fcode == BUILT_IN_MEMSET_CHK)
11688 return NULL_RTX;
11689 else
11691 unsigned int dest_align
11692 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11694 /* If DEST is not a pointer type, call the normal function. */
11695 if (dest_align == 0)
11696 return NULL_RTX;
11698 /* If SRC and DEST are the same (and not volatile), do nothing. */
11699 if (operand_equal_p (src, dest, 0))
11701 tree expr;
11703 if (fcode != BUILT_IN_MEMPCPY_CHK)
11705 /* Evaluate and ignore LEN in case it has side-effects. */
11706 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11707 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11710 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11711 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11714 /* __memmove_chk special case. */
11715 if (fcode == BUILT_IN_MEMMOVE_CHK)
11717 unsigned int src_align
11718 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11720 if (src_align == 0)
11721 return NULL_RTX;
11723 /* If src is categorized for a readonly section we can use
11724 normal __memcpy_chk. */
11725 if (readonly_data_expr (src))
11727 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11728 if (!fn)
11729 return NULL_RTX;
11730 fn = build_call_nofold (fn, 4, dest, src, len, size);
11731 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11732 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11733 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11736 return NULL_RTX;
11740 /* Emit warning if a buffer overflow is detected at compile time. */
11742 static void
11743 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11745 int is_strlen = 0;
11746 tree len, size;
11747 location_t loc = tree_nonartificial_location (exp);
11749 switch (fcode)
11751 case BUILT_IN_STRCPY_CHK:
11752 case BUILT_IN_STPCPY_CHK:
11753 /* For __strcat_chk the warning will be emitted only if overflowing
11754 by at least strlen (dest) + 1 bytes. */
11755 case BUILT_IN_STRCAT_CHK:
11756 len = CALL_EXPR_ARG (exp, 1);
11757 size = CALL_EXPR_ARG (exp, 2);
11758 is_strlen = 1;
11759 break;
11760 case BUILT_IN_STRNCAT_CHK:
11761 case BUILT_IN_STRNCPY_CHK:
11762 len = CALL_EXPR_ARG (exp, 2);
11763 size = CALL_EXPR_ARG (exp, 3);
11764 break;
11765 case BUILT_IN_SNPRINTF_CHK:
11766 case BUILT_IN_VSNPRINTF_CHK:
11767 len = CALL_EXPR_ARG (exp, 1);
11768 size = CALL_EXPR_ARG (exp, 3);
11769 break;
11770 default:
11771 gcc_unreachable ();
11774 if (!len || !size)
11775 return;
11777 if (! host_integerp (size, 1) || integer_all_onesp (size))
11778 return;
11780 if (is_strlen)
11782 len = c_strlen (len, 1);
11783 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11784 return;
11786 else if (fcode == BUILT_IN_STRNCAT_CHK)
11788 tree src = CALL_EXPR_ARG (exp, 1);
11789 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11790 return;
11791 src = c_strlen (src, 1);
11792 if (! src || ! host_integerp (src, 1))
11794 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11795 exp, get_callee_fndecl (exp));
11796 return;
11798 else if (tree_int_cst_lt (src, size))
11799 return;
11801 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11802 return;
11804 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11805 exp, get_callee_fndecl (exp));
11808 /* Emit warning if a buffer overflow is detected at compile time
11809 in __sprintf_chk/__vsprintf_chk calls. */
11811 static void
11812 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11814 tree size, len, fmt;
11815 const char *fmt_str;
11816 int nargs = call_expr_nargs (exp);
11818 /* Verify the required arguments in the original call. */
11820 if (nargs < 4)
11821 return;
11822 size = CALL_EXPR_ARG (exp, 2);
11823 fmt = CALL_EXPR_ARG (exp, 3);
11825 if (! host_integerp (size, 1) || integer_all_onesp (size))
11826 return;
11828 /* Check whether the format is a literal string constant. */
11829 fmt_str = c_getstr (fmt);
11830 if (fmt_str == NULL)
11831 return;
11833 if (!init_target_chars ())
11834 return;
11836 /* If the format doesn't contain % args or %%, we know its size. */
11837 if (strchr (fmt_str, target_percent) == 0)
11838 len = build_int_cstu (size_type_node, strlen (fmt_str));
11839 /* If the format is "%s" and first ... argument is a string literal,
11840 we know it too. */
11841 else if (fcode == BUILT_IN_SPRINTF_CHK
11842 && strcmp (fmt_str, target_percent_s) == 0)
11844 tree arg;
11846 if (nargs < 5)
11847 return;
11848 arg = CALL_EXPR_ARG (exp, 4);
11849 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11850 return;
11852 len = c_strlen (arg, 1);
11853 if (!len || ! host_integerp (len, 1))
11854 return;
11856 else
11857 return;
11859 if (! tree_int_cst_lt (len, size))
11860 warning_at (tree_nonartificial_location (exp),
11861 0, "%Kcall to %D will always overflow destination buffer",
11862 exp, get_callee_fndecl (exp));
11865 /* Emit warning if a free is called with address of a variable. */
11867 static void
11868 maybe_emit_free_warning (tree exp)
11870 tree arg = CALL_EXPR_ARG (exp, 0);
11872 STRIP_NOPS (arg);
11873 if (TREE_CODE (arg) != ADDR_EXPR)
11874 return;
11876 arg = get_base_address (TREE_OPERAND (arg, 0));
11877 if (arg == NULL || INDIRECT_REF_P (arg))
11878 return;
11880 if (SSA_VAR_P (arg))
11881 warning_at (tree_nonartificial_location (exp),
11882 0, "%Kattempt to free a non-heap object %qD", exp, arg);
11883 else
11884 warning_at (tree_nonartificial_location (exp),
11885 0, "%Kattempt to free a non-heap object", exp);
11888 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11889 if possible. */
11891 tree
11892 fold_builtin_object_size (tree ptr, tree ost)
11894 tree ret = NULL_TREE;
11895 int object_size_type;
11897 if (!validate_arg (ptr, POINTER_TYPE)
11898 || !validate_arg (ost, INTEGER_TYPE))
11899 return NULL_TREE;
11901 STRIP_NOPS (ost);
11903 if (TREE_CODE (ost) != INTEGER_CST
11904 || tree_int_cst_sgn (ost) < 0
11905 || compare_tree_int (ost, 3) > 0)
11906 return NULL_TREE;
11908 object_size_type = tree_low_cst (ost, 0);
11910 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11911 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11912 and (size_t) 0 for types 2 and 3. */
11913 if (TREE_SIDE_EFFECTS (ptr))
11914 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11916 if (TREE_CODE (ptr) == ADDR_EXPR)
11917 ret = build_int_cstu (size_type_node,
11918 compute_builtin_object_size (ptr, object_size_type));
11920 else if (TREE_CODE (ptr) == SSA_NAME)
11922 unsigned HOST_WIDE_INT bytes;
11924 /* If object size is not known yet, delay folding until
11925 later. Maybe subsequent passes will help determining
11926 it. */
11927 bytes = compute_builtin_object_size (ptr, object_size_type);
11928 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11929 ? -1 : 0))
11930 ret = build_int_cstu (size_type_node, bytes);
11933 if (ret)
11935 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11936 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11937 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11938 ret = NULL_TREE;
11941 return ret;
11944 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11945 DEST, SRC, LEN, and SIZE are the arguments to the call.
11946 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11947 code of the builtin. If MAXLEN is not NULL, it is maximum length
11948 passed as third argument. */
11950 tree
11951 fold_builtin_memory_chk (location_t loc, tree fndecl,
11952 tree dest, tree src, tree len, tree size,
11953 tree maxlen, bool ignore,
11954 enum built_in_function fcode)
11956 tree fn;
11958 if (!validate_arg (dest, POINTER_TYPE)
11959 || !validate_arg (src,
11960 (fcode == BUILT_IN_MEMSET_CHK
11961 ? INTEGER_TYPE : POINTER_TYPE))
11962 || !validate_arg (len, INTEGER_TYPE)
11963 || !validate_arg (size, INTEGER_TYPE))
11964 return NULL_TREE;
11966 /* If SRC and DEST are the same (and not volatile), return DEST
11967 (resp. DEST+LEN for __mempcpy_chk). */
11968 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11970 if (fcode != BUILT_IN_MEMPCPY_CHK)
11971 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
11972 dest, len);
11973 else
11975 tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
11976 dest, len);
11977 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
11981 if (! host_integerp (size, 1))
11982 return NULL_TREE;
11984 if (! integer_all_onesp (size))
11986 if (! host_integerp (len, 1))
11988 /* If LEN is not constant, try MAXLEN too.
11989 For MAXLEN only allow optimizing into non-_ocs function
11990 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11991 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11993 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11995 /* (void) __mempcpy_chk () can be optimized into
11996 (void) __memcpy_chk (). */
11997 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11998 if (!fn)
11999 return NULL_TREE;
12001 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12003 return NULL_TREE;
12006 else
12007 maxlen = len;
12009 if (tree_int_cst_lt (size, maxlen))
12010 return NULL_TREE;
12013 fn = NULL_TREE;
12014 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12015 mem{cpy,pcpy,move,set} is available. */
12016 switch (fcode)
12018 case BUILT_IN_MEMCPY_CHK:
12019 fn = built_in_decls[BUILT_IN_MEMCPY];
12020 break;
12021 case BUILT_IN_MEMPCPY_CHK:
12022 fn = built_in_decls[BUILT_IN_MEMPCPY];
12023 break;
12024 case BUILT_IN_MEMMOVE_CHK:
12025 fn = built_in_decls[BUILT_IN_MEMMOVE];
12026 break;
12027 case BUILT_IN_MEMSET_CHK:
12028 fn = built_in_decls[BUILT_IN_MEMSET];
12029 break;
12030 default:
12031 break;
12034 if (!fn)
12035 return NULL_TREE;
12037 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12040 /* Fold a call to the __st[rp]cpy_chk builtin.
12041 DEST, SRC, and SIZE are the arguments to the call.
12042 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12043 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12044 strings passed as second argument. */
12046 tree
12047 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12048 tree src, tree size,
12049 tree maxlen, bool ignore,
12050 enum built_in_function fcode)
12052 tree len, fn;
12054 if (!validate_arg (dest, POINTER_TYPE)
12055 || !validate_arg (src, POINTER_TYPE)
12056 || !validate_arg (size, INTEGER_TYPE))
12057 return NULL_TREE;
12059 /* If SRC and DEST are the same (and not volatile), return DEST. */
12060 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12061 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12063 if (! host_integerp (size, 1))
12064 return NULL_TREE;
12066 if (! integer_all_onesp (size))
12068 len = c_strlen (src, 1);
12069 if (! len || ! host_integerp (len, 1))
12071 /* If LEN is not constant, try MAXLEN too.
12072 For MAXLEN only allow optimizing into non-_ocs function
12073 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12074 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12076 if (fcode == BUILT_IN_STPCPY_CHK)
12078 if (! ignore)
12079 return NULL_TREE;
12081 /* If return value of __stpcpy_chk is ignored,
12082 optimize into __strcpy_chk. */
12083 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12084 if (!fn)
12085 return NULL_TREE;
12087 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12090 if (! len || TREE_SIDE_EFFECTS (len))
12091 return NULL_TREE;
12093 /* If c_strlen returned something, but not a constant,
12094 transform __strcpy_chk into __memcpy_chk. */
12095 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12096 if (!fn)
12097 return NULL_TREE;
12099 len = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
12100 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12101 build_call_expr_loc (loc, fn, 4,
12102 dest, src, len, size));
12105 else
12106 maxlen = len;
12108 if (! tree_int_cst_lt (maxlen, size))
12109 return NULL_TREE;
12112 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12113 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12114 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12115 if (!fn)
12116 return NULL_TREE;
12118 return build_call_expr_loc (loc, fn, 2, dest, src);
12121 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12122 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12123 length passed as third argument. */
12125 tree
12126 fold_builtin_strncpy_chk (location_t loc, tree dest, tree src,
12127 tree len, tree size, tree maxlen)
12129 tree fn;
12131 if (!validate_arg (dest, POINTER_TYPE)
12132 || !validate_arg (src, POINTER_TYPE)
12133 || !validate_arg (len, INTEGER_TYPE)
12134 || !validate_arg (size, INTEGER_TYPE))
12135 return NULL_TREE;
12137 if (! host_integerp (size, 1))
12138 return NULL_TREE;
12140 if (! integer_all_onesp (size))
12142 if (! host_integerp (len, 1))
12144 /* If LEN is not constant, try MAXLEN too.
12145 For MAXLEN only allow optimizing into non-_ocs function
12146 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12147 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12148 return NULL_TREE;
12150 else
12151 maxlen = len;
12153 if (tree_int_cst_lt (size, maxlen))
12154 return NULL_TREE;
12157 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12158 fn = built_in_decls[BUILT_IN_STRNCPY];
12159 if (!fn)
12160 return NULL_TREE;
12162 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12165 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12166 are the arguments to the call. */
12168 static tree
12169 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12170 tree src, tree size)
12172 tree fn;
12173 const char *p;
12175 if (!validate_arg (dest, POINTER_TYPE)
12176 || !validate_arg (src, POINTER_TYPE)
12177 || !validate_arg (size, INTEGER_TYPE))
12178 return NULL_TREE;
12180 p = c_getstr (src);
12181 /* If the SRC parameter is "", return DEST. */
12182 if (p && *p == '\0')
12183 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12185 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12186 return NULL_TREE;
12188 /* If __builtin_strcat_chk is used, assume strcat is available. */
12189 fn = built_in_decls[BUILT_IN_STRCAT];
12190 if (!fn)
12191 return NULL_TREE;
12193 return build_call_expr_loc (loc, fn, 2, dest, src);
12196 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12197 LEN, and SIZE. */
12199 static tree
12200 fold_builtin_strncat_chk (location_t loc, tree fndecl,
12201 tree dest, tree src, tree len, tree size)
12203 tree fn;
12204 const char *p;
12206 if (!validate_arg (dest, POINTER_TYPE)
12207 || !validate_arg (src, POINTER_TYPE)
12208 || !validate_arg (size, INTEGER_TYPE)
12209 || !validate_arg (size, INTEGER_TYPE))
12210 return NULL_TREE;
12212 p = c_getstr (src);
12213 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12214 if (p && *p == '\0')
12215 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12216 else if (integer_zerop (len))
12217 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12219 if (! host_integerp (size, 1))
12220 return NULL_TREE;
12222 if (! integer_all_onesp (size))
12224 tree src_len = c_strlen (src, 1);
12225 if (src_len
12226 && host_integerp (src_len, 1)
12227 && host_integerp (len, 1)
12228 && ! tree_int_cst_lt (len, src_len))
12230 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12231 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12232 if (!fn)
12233 return NULL_TREE;
12235 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12237 return NULL_TREE;
12240 /* If __builtin_strncat_chk is used, assume strncat is available. */
12241 fn = built_in_decls[BUILT_IN_STRNCAT];
12242 if (!fn)
12243 return NULL_TREE;
12245 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12248 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12249 a normal call should be emitted rather than expanding the function
12250 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12252 static tree
12253 fold_builtin_sprintf_chk (location_t loc, tree exp,
12254 enum built_in_function fcode)
12256 tree dest, size, len, fn, fmt, flag;
12257 const char *fmt_str;
12258 int nargs = call_expr_nargs (exp);
12260 /* Verify the required arguments in the original call. */
12261 if (nargs < 4)
12262 return NULL_TREE;
12263 dest = CALL_EXPR_ARG (exp, 0);
12264 if (!validate_arg (dest, POINTER_TYPE))
12265 return NULL_TREE;
12266 flag = CALL_EXPR_ARG (exp, 1);
12267 if (!validate_arg (flag, INTEGER_TYPE))
12268 return NULL_TREE;
12269 size = CALL_EXPR_ARG (exp, 2);
12270 if (!validate_arg (size, INTEGER_TYPE))
12271 return NULL_TREE;
12272 fmt = CALL_EXPR_ARG (exp, 3);
12273 if (!validate_arg (fmt, POINTER_TYPE))
12274 return NULL_TREE;
12276 if (! host_integerp (size, 1))
12277 return NULL_TREE;
12279 len = NULL_TREE;
12281 if (!init_target_chars ())
12282 return NULL_TREE;
12284 /* Check whether the format is a literal string constant. */
12285 fmt_str = c_getstr (fmt);
12286 if (fmt_str != NULL)
12288 /* If the format doesn't contain % args or %%, we know the size. */
12289 if (strchr (fmt_str, target_percent) == 0)
12291 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12292 len = build_int_cstu (size_type_node, strlen (fmt_str));
12294 /* If the format is "%s" and first ... argument is a string literal,
12295 we know the size too. */
12296 else if (fcode == BUILT_IN_SPRINTF_CHK
12297 && strcmp (fmt_str, target_percent_s) == 0)
12299 tree arg;
12301 if (nargs == 5)
12303 arg = CALL_EXPR_ARG (exp, 4);
12304 if (validate_arg (arg, POINTER_TYPE))
12306 len = c_strlen (arg, 1);
12307 if (! len || ! host_integerp (len, 1))
12308 len = NULL_TREE;
12314 if (! integer_all_onesp (size))
12316 if (! len || ! tree_int_cst_lt (len, size))
12317 return NULL_TREE;
12320 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12321 or if format doesn't contain % chars or is "%s". */
12322 if (! integer_zerop (flag))
12324 if (fmt_str == NULL)
12325 return NULL_TREE;
12326 if (strchr (fmt_str, target_percent) != NULL
12327 && strcmp (fmt_str, target_percent_s))
12328 return NULL_TREE;
12331 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12332 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12333 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12334 if (!fn)
12335 return NULL_TREE;
12337 return rewrite_call_expr (loc, exp, 4, fn, 2, dest, fmt);
12340 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12341 a normal call should be emitted rather than expanding the function
12342 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12343 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12344 passed as second argument. */
/* NOTE(review): this chunk is a lossy extraction — brace/blank lines were
   dropped and each line carries a fused original line number.  Code tokens
   are left byte-identical; only comments are added.  */
/* Fold __{,v}snprintf_chk (dest, len, flag, size, fmt, ...) into plain
   {,v}snprintf when the checked size provably cannot be exceeded.  */
12346 tree
12347 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
12348 enum built_in_function fcode)
12350 tree dest, size, len, fn, fmt, flag;
12351 const char *fmt_str;
12353 /* Verify the required arguments in the original call. */
12354 if (call_expr_nargs (exp) < 5)
12355 return NULL_TREE;
12356 dest = CALL_EXPR_ARG (exp, 0);
12357 if (!validate_arg (dest, POINTER_TYPE))
12358 return NULL_TREE;
12359 len = CALL_EXPR_ARG (exp, 1);
12360 if (!validate_arg (len, INTEGER_TYPE))
12361 return NULL_TREE;
12362 flag = CALL_EXPR_ARG (exp, 2);
12363 if (!validate_arg (flag, INTEGER_TYPE))
12364 return NULL_TREE;
12365 size = CALL_EXPR_ARG (exp, 3);
12366 if (!validate_arg (size, INTEGER_TYPE))
12367 return NULL_TREE;
12368 fmt = CALL_EXPR_ARG (exp, 4);
12369 if (!validate_arg (fmt, POINTER_TYPE))
12370 return NULL_TREE;
/* SIZE must be a known non-negative constant to reason about it.  */
12372 if (! host_integerp (size, 1))
12373 return NULL_TREE;
/* All-ones SIZE means "unknown object size"; no bound check needed then.  */
12375 if (! integer_all_onesp (size))
12377 if (! host_integerp (len, 1))
12379 /* If LEN is not constant, try MAXLEN too.
12380 For MAXLEN only allow optimizing into non-_ocs function
12381 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12382 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12383 return NULL_TREE;
12385 else
12386 maxlen = len;
/* Give up if the buffer may be smaller than the requested length.  */
12388 if (tree_int_cst_lt (size, maxlen))
12389 return NULL_TREE;
12392 if (!init_target_chars ())
12393 return NULL_TREE;
12395 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12396 or if format doesn't contain % chars or is "%s". */
12397 if (! integer_zerop (flag))
12399 fmt_str = c_getstr (fmt);
12400 if (fmt_str == NULL)
12401 return NULL_TREE;
12402 if (strchr (fmt_str, target_percent) != NULL
12403 && strcmp (fmt_str, target_percent_s))
12404 return NULL_TREE;
12407 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12408 available. */
12409 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12410 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12411 if (!fn)
12412 return NULL_TREE;
/* Rebuild the call, skipping the first 5 args, keeping dest/len/fmt.  */
12414 return rewrite_call_expr (loc, exp, 5, fn, 3, dest, len, fmt);
12417 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12418 FMT and ARG are the arguments to the call; we don't fold cases with
12419 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12421 Return NULL_TREE if no simplification was possible, otherwise return the
12422 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12423 code of the function to be simplified. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Fold printf-family calls with a literal format into putchar/puts when the
   return value is ignored.  Returns NULL_TREE when no folding applies.  */
12425 static tree
12426 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
12427 tree arg, bool ignore,
12428 enum built_in_function fcode)
12430 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12431 const char *fmt_str = NULL;
12433 /* If the return value is used, don't do the transformation. */
12434 if (! ignore)
12435 return NULL_TREE;
12437 /* Verify the required arguments in the original call. */
12438 if (!validate_arg (fmt, POINTER_TYPE))
12439 return NULL_TREE;
12441 /* Check whether the format is a literal string constant. */
12442 fmt_str = c_getstr (fmt);
12443 if (fmt_str == NULL)
12444 return NULL_TREE;
12446 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12448 /* If we're using an unlocked function, assume the other
12449 unlocked functions exist explicitly. */
12450 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12451 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12453 else
12455 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12456 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12459 if (!init_target_chars ())
12460 return NULL_TREE;
/* Case 1: format is exactly "%s", or contains no '%' at all.  */
12462 if (strcmp (fmt_str, target_percent_s) == 0
12463 || strchr (fmt_str, target_percent) == NULL)
12465 const char *str;
12467 if (strcmp (fmt_str, target_percent_s) == 0)
12469 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12470 return NULL_TREE;
12472 if (!arg || !validate_arg (arg, POINTER_TYPE))
12473 return NULL_TREE;
/* For "%s" the string we print is the (literal) argument.  */
12475 str = c_getstr (arg);
12476 if (str == NULL)
12477 return NULL_TREE;
12479 else
12481 /* The format specifier doesn't contain any '%' characters. */
12482 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12483 && arg)
12484 return NULL_TREE;
12485 str = fmt_str;
12488 /* If the string was "", printf does nothing. */
12489 if (str[0] == '\0')
12490 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12492 /* If the string has length of 1, call putchar. */
12493 if (str[1] == '\0')
12495 /* Given printf("c"), (where c is any one character,)
12496 convert "c"[0] to an int and pass that to the replacement
12497 function. */
12498 newarg = build_int_cst (NULL_TREE, str[0]);
12499 if (fn_putchar)
12500 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
12502 else
12504 /* If the string was "string\n", call puts("string"). */
12505 size_t len = strlen (str);
12506 if ((unsigned char)str[len - 1] == target_newline)
12508 /* Create a NUL-terminated string that's one char shorter
12509 than the original, stripping off the trailing '\n'. */
12510 char *newstr = XALLOCAVEC (char, len);
12511 memcpy (newstr, str, len - 1);
12512 newstr[len - 1] = 0;
12514 newarg = build_string_literal (len, newstr);
12515 if (fn_puts)
12516 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
12518 else
12519 /* We'd like to arrange to call fputs(string,stdout) here,
12520 but we need stdout and don't have a way to get it yet. */
12521 return NULL_TREE;
12525 /* The other optimizations can be done only on the non-va_list variants. */
12526 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12527 return NULL_TREE;
12529 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12530 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12532 if (!arg || !validate_arg (arg, POINTER_TYPE))
12533 return NULL_TREE;
12534 if (fn_puts)
12535 call = build_call_expr_loc (loc, fn_puts, 1, arg);
12538 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12539 else if (strcmp (fmt_str, target_percent_c) == 0)
12541 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12542 return NULL_TREE;
12543 if (fn_putchar)
12544 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
12547 if (!call)
12548 return NULL_TREE;
/* The replacement returns int; coerce to the original return type.  */
12550 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12553 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12554 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12555 more than 3 arguments, and ARG may be null in the 2-argument case.
12557 Return NULL_TREE if no simplification was possible, otherwise return the
12558 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12559 code of the function to be simplified. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Fold fprintf-family calls with a literal format into fputc/fputs when the
   return value is ignored.  Returns NULL_TREE when no folding applies.  */
12561 static tree
12562 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
12563 tree fmt, tree arg, bool ignore,
12564 enum built_in_function fcode)
12566 tree fn_fputc, fn_fputs, call = NULL_TREE;
12567 const char *fmt_str = NULL;
12569 /* If the return value is used, don't do the transformation. */
12570 if (! ignore)
12571 return NULL_TREE;
12573 /* Verify the required arguments in the original call. */
12574 if (!validate_arg (fp, POINTER_TYPE))
12575 return NULL_TREE;
12576 if (!validate_arg (fmt, POINTER_TYPE))
12577 return NULL_TREE;
12579 /* Check whether the format is a literal string constant. */
12580 fmt_str = c_getstr (fmt);
12581 if (fmt_str == NULL)
12582 return NULL_TREE;
12584 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12586 /* If we're using an unlocked function, assume the other
12587 unlocked functions exist explicitly. */
12588 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12589 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12591 else
12593 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12594 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12597 if (!init_target_chars ())
12598 return NULL_TREE;
12600 /* If the format doesn't contain % args or %%, use strcpy. */
12601 if (strchr (fmt_str, target_percent) == NULL)
12603 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12604 && arg)
12605 return NULL_TREE;
12607 /* If the format specifier was "", fprintf does nothing. */
12608 if (fmt_str[0] == '\0')
12610 /* If FP has side-effects, just wait until gimplification is
12611 done. */
12612 if (TREE_SIDE_EFFECTS (fp))
12613 return NULL_TREE;
12615 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12618 /* When "string" doesn't contain %, replace all cases of
12619 fprintf (fp, string) with fputs (string, fp). The fputs
12620 builtin will take care of special cases like length == 1. */
12621 if (fn_fputs)
12622 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
12625 /* The other optimizations can be done only on the non-va_list variants. */
12626 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12627 return NULL_TREE;
12629 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12630 else if (strcmp (fmt_str, target_percent_s) == 0)
12632 if (!arg || !validate_arg (arg, POINTER_TYPE))
12633 return NULL_TREE;
12634 if (fn_fputs)
12635 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
12638 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12639 else if (strcmp (fmt_str, target_percent_c) == 0)
12641 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12642 return NULL_TREE;
12643 if (fn_fputc)
12644 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
12647 if (!call)
12648 return NULL_TREE;
/* The replacement returns int; coerce to the original return type.  */
12649 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
12652 /* Initialize format string characters in the target charset. */
12654 static bool
12655 init_target_chars (void)
12657 static bool init;
12658 if (!init)
12660 target_newline = lang_hooks.to_target_charset ('\n');
12661 target_percent = lang_hooks.to_target_charset ('%');
12662 target_c = lang_hooks.to_target_charset ('c');
12663 target_s = lang_hooks.to_target_charset ('s');
12664 if (target_newline == 0 || target_percent == 0 || target_c == 0
12665 || target_s == 0)
12666 return false;
12668 target_percent_c[0] = target_percent;
12669 target_percent_c[1] = target_c;
12670 target_percent_c[2] = '\0';
12672 target_percent_s[0] = target_percent;
12673 target_percent_s[1] = target_s;
12674 target_percent_s[2] = '\0';
12676 target_percent_s_newline[0] = target_percent;
12677 target_percent_s_newline[1] = target_s;
12678 target_percent_s_newline[2] = target_newline;
12679 target_percent_s_newline[3] = '\0';
12681 init = true;
12683 return true;
12686 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12687 and no overflow/underflow occurred. INEXACT is true if M was not
12688 exactly calculated. TYPE is the tree type for the result. This
12689 function assumes that you cleared the MPFR flags and then
12690 calculated M to see if anything subsequently set a flag prior to
12691 entering this function. Return NULL_TREE if any checks fail. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Convert MPFR value M into a REAL_CST of TYPE, or return NULL_TREE if the
   value cannot be represented exactly (NaN/Inf, over/underflow, or inexact
   result under -frounding-math).  */
12693 static tree
12694 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12696 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12697 overflow/underflow occurred. If -frounding-math, proceed iff the
12698 result of calling FUNC was exact. */
12699 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12700 && (!flag_rounding_math || !inexact))
12702 REAL_VALUE_TYPE rr;
12704 real_from_mpfr (&rr, m, type, GMP_RNDN);
12705 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12706 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12707 but the mpft_t is not, then we underflowed in the
12708 conversion. */
12709 if (real_isfinite (&rr)
12710 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12712 REAL_VALUE_TYPE rmode;
12714 real_convert (&rmode, TYPE_MODE (type), &rr);
12715 /* Proceed iff the specified mode can hold the value. */
12716 if (real_identical (&rmode, &rr))
12717 return build_real (type, rmode);
12720 return NULL_TREE;
12723 #ifdef HAVE_mpc
12724 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
12725 number and no overflow/underflow occurred. INEXACT is true if M
12726 was not exactly calculated. TYPE is the tree type for the result.
12727 This function assumes that you cleared the MPFR flags and then
12728 calculated M to see if anything subsequently set a flag prior to
12729 entering this function. Return NULL_TREE if any checks fail, if
12730 FORCE_CONVERT is true, then bypass the checks. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Complex analogue of do_mpfr_ckconv: convert MPC value M into a
   COMPLEX_CST of TYPE, or NULL_TREE if inexact/non-finite — unless
   FORCE_CONVERT, which bypasses all the representability checks.  */
12732 static tree
12733 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
12735 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12736 overflow/underflow occurred. If -frounding-math, proceed iff the
12737 result of calling FUNC was exact. */
12738 if (force_convert
12739 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
12740 && !mpfr_overflow_p () && !mpfr_underflow_p ()
12741 && (!flag_rounding_math || !inexact)))
12743 REAL_VALUE_TYPE re, im;
12745 real_from_mpfr (&re, mpc_realref (m), type, GMP_RNDN);
12746 real_from_mpfr (&im, mpc_imagref (m), type, GMP_RNDN);
12747 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
12748 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12749 but the mpft_t is not, then we underflowed in the
12750 conversion. */
12751 if (force_convert
12752 || (real_isfinite (&re) && real_isfinite (&im)
12753 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
12754 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
12756 REAL_VALUE_TYPE re_mode, im_mode;
/* TYPE is the complex type; its TREE_TYPE is the scalar element type.  */
12758 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
12759 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
12760 /* Proceed iff the specified mode can hold the value. */
12761 if (force_convert
12762 || (real_identical (&re_mode, &re)
12763 && real_identical (&im_mode, &im)))
12764 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
12765 build_real (TREE_TYPE (type), im_mode));
12768 return NULL_TREE;
12770 #endif /* HAVE_mpc */
12772 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12773 FUNC on it and return the resulting value as a tree with type TYPE.
12774 If MIN and/or MAX are not NULL, then the supplied ARG must be
12775 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12776 acceptable values, otherwise they are not. The mpfr precision is
12777 set to the precision of TYPE. We assume that function FUNC returns
12778 zero if the result could be calculated exactly within the requested
12779 precision. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold a one-argument MPFR function: if ARG is a finite REAL_CST
   within [MIN,MAX] (bounds optional; INCLUSIVE selects <=/< semantics),
   evaluate FUNC at TYPE's precision and return the result, else NULL_TREE.  */
12781 static tree
12782 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12783 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12784 bool inclusive)
12786 tree result = NULL_TREE;
12788 STRIP_NOPS (arg);
12790 /* To proceed, MPFR must exactly represent the target floating point
12791 format, which only happens when the target base equals two. */
12792 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12793 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12795 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12797 if (real_isfinite (ra)
12798 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12799 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12801 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12802 const int prec = fmt->p;
12803 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12804 int inexact;
12805 mpfr_t m;
12807 mpfr_init2 (m, prec);
12808 mpfr_from_real (m, ra, GMP_RNDN);
/* Clear flags so do_mpfr_ckconv can detect over/underflow from FUNC.  */
12809 mpfr_clear_flags ();
12810 inexact = func (m, m, rnd);
12811 result = do_mpfr_ckconv (m, type, inexact);
12812 mpfr_clear (m);
12816 return result;
12819 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12820 FUNC on it and return the resulting value as a tree with type TYPE.
12821 The mpfr precision is set to the precision of TYPE. We assume that
12822 function FUNC returns zero if the result could be calculated
12823 exactly within the requested precision. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold a two-argument MPFR function: requires both args to be
   finite REAL_CSTs and a base-2 target format; returns NULL_TREE otherwise.  */
12825 static tree
12826 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12827 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12829 tree result = NULL_TREE;
12831 STRIP_NOPS (arg1);
12832 STRIP_NOPS (arg2);
12834 /* To proceed, MPFR must exactly represent the target floating point
12835 format, which only happens when the target base equals two. */
12836 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12837 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12838 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12840 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12841 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12843 if (real_isfinite (ra1) && real_isfinite (ra2))
12845 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12846 const int prec = fmt->p;
12847 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12848 int inexact;
12849 mpfr_t m1, m2;
12851 mpfr_inits2 (prec, m1, m2, NULL);
12852 mpfr_from_real (m1, ra1, GMP_RNDN);
12853 mpfr_from_real (m2, ra2, GMP_RNDN);
12854 mpfr_clear_flags ();
/* Result written in place into m1.  */
12855 inexact = func (m1, m1, m2, rnd);
12856 result = do_mpfr_ckconv (m1, type, inexact);
12857 mpfr_clears (m1, m2, NULL);
12861 return result;
12864 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12865 FUNC on it and return the resulting value as a tree with type TYPE.
12866 The mpfr precision is set to the precision of TYPE. We assume that
12867 function FUNC returns zero if the result could be calculated
12868 exactly within the requested precision. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold a three-argument MPFR function (e.g. fma): all three args
   must be finite REAL_CSTs on a base-2 target; returns NULL_TREE otherwise.  */
12870 static tree
12871 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12872 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12874 tree result = NULL_TREE;
12876 STRIP_NOPS (arg1);
12877 STRIP_NOPS (arg2);
12878 STRIP_NOPS (arg3);
12880 /* To proceed, MPFR must exactly represent the target floating point
12881 format, which only happens when the target base equals two. */
12882 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12883 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12884 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12885 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12887 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12888 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12889 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12891 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12893 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12894 const int prec = fmt->p;
12895 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12896 int inexact;
12897 mpfr_t m1, m2, m3;
12899 mpfr_inits2 (prec, m1, m2, m3, NULL);
12900 mpfr_from_real (m1, ra1, GMP_RNDN);
12901 mpfr_from_real (m2, ra2, GMP_RNDN);
12902 mpfr_from_real (m3, ra3, GMP_RNDN);
12903 mpfr_clear_flags ();
/* Result written in place into m1.  */
12904 inexact = func (m1, m1, m2, m3, rnd);
12905 result = do_mpfr_ckconv (m1, type, inexact);
12906 mpfr_clears (m1, m2, m3, NULL);
12910 return result;
12913 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12914 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12915 If ARG_SINP and ARG_COSP are NULL then the result is returned
12916 as a complex value.
12917 The type is taken from the type of ARG and is used for setting the
12918 precision of the calculation and results. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold sincos/cexpi: compute sin and cos of a REAL_CST ARG.  With
   NULL out-pointers, return a COMPLEX expr (cos + i*sin); otherwise emit
   assignments through ARG_SINP/ARG_COSP combined into a COMPOUND_EXPR.  */
12920 static tree
12921 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12923 tree const type = TREE_TYPE (arg);
12924 tree result = NULL_TREE;
12926 STRIP_NOPS (arg);
12928 /* To proceed, MPFR must exactly represent the target floating point
12929 format, which only happens when the target base equals two. */
12930 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12931 && TREE_CODE (arg) == REAL_CST
12932 && !TREE_OVERFLOW (arg))
12934 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12936 if (real_isfinite (ra))
12938 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12939 const int prec = fmt->p;
12940 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
12941 tree result_s, result_c;
12942 int inexact;
12943 mpfr_t m, ms, mc;
12945 mpfr_inits2 (prec, m, ms, mc, NULL);
12946 mpfr_from_real (m, ra, GMP_RNDN);
12947 mpfr_clear_flags ();
12948 inexact = mpfr_sin_cos (ms, mc, m, rnd);
/* Both results must be exactly representable or we fold neither.  */
12949 result_s = do_mpfr_ckconv (ms, type, inexact);
12950 result_c = do_mpfr_ckconv (mc, type, inexact);
12951 mpfr_clears (m, ms, mc, NULL);
12952 if (result_s && result_c)
12954 /* If we are to return in a complex value do so. */
12955 if (!arg_sinp && !arg_cosp)
12956 return build_complex (build_complex_type (type),
12957 result_c, result_s);
12959 /* Dereference the sin/cos pointer arguments. */
12960 arg_sinp = build_fold_indirect_ref (arg_sinp);
12961 arg_cosp = build_fold_indirect_ref (arg_cosp);
12962 /* Proceed if valid pointer type were passed in. */
12963 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12964 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12966 /* Set the values. */
12967 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12968 result_s);
12969 TREE_SIDE_EFFECTS (result_s) = 1;
12970 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12971 result_c);
12972 TREE_SIDE_EFFECTS (result_c) = 1;
12973 /* Combine the assignments into a compound expr. */
12974 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12975 result_s, result_c));
12980 return result;
12983 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12984 two-argument mpfr order N Bessel function FUNC on them and return
12985 the resulting value as a tree with type TYPE. The mpfr precision
12986 is set to the precision of TYPE. We assume that function FUNC
12987 returns zero if the result could be calculated exactly within the
12988 requested precision. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold jn/yn-style Bessel calls: ARG1 is the integer order, ARG2
   the real argument (optionally bounded below by MIN/INCLUSIVE).  */
12989 static tree
12990 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12991 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12992 const REAL_VALUE_TYPE *min, bool inclusive)
12994 tree result = NULL_TREE;
12996 STRIP_NOPS (arg1);
12997 STRIP_NOPS (arg2);
12999 /* To proceed, MPFR must exactly represent the target floating point
13000 format, which only happens when the target base equals two. */
13001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13002 && host_integerp (arg1, 0)
13003 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13005 const HOST_WIDE_INT n = tree_low_cst(arg1, 0);
13006 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
/* MPFR takes the order as a host long; require it fits without change.  */
13008 if (n == (long)n
13009 && real_isfinite (ra)
13010 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13012 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13013 const int prec = fmt->p;
13014 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13015 int inexact;
13016 mpfr_t m;
13018 mpfr_init2 (m, prec);
13019 mpfr_from_real (m, ra, GMP_RNDN);
13020 mpfr_clear_flags ();
13021 inexact = func (m, n, m, rnd);
13022 result = do_mpfr_ckconv (m, type, inexact);
13023 mpfr_clear (m);
13027 return result;
13030 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13031 the pointer *(ARG_QUO) and return the result. The type is taken
13032 from the type of ARG0 and is used for setting the precision of the
13033 calculation and results. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold remquo: compute the remainder of ARG0/ARG1, store the
   low quotient bits through ARG_QUO, and return the combined expression.  */
13035 static tree
13036 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13038 tree const type = TREE_TYPE (arg0);
13039 tree result = NULL_TREE;
13041 STRIP_NOPS (arg0);
13042 STRIP_NOPS (arg1);
13044 /* To proceed, MPFR must exactly represent the target floating point
13045 format, which only happens when the target base equals two. */
13046 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13047 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13048 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13050 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13051 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13053 if (real_isfinite (ra0) && real_isfinite (ra1))
13055 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13056 const int prec = fmt->p;
13057 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13058 tree result_rem;
13059 long integer_quo;
13060 mpfr_t m0, m1;
13062 mpfr_inits2 (prec, m0, m1, NULL);
13063 mpfr_from_real (m0, ra0, GMP_RNDN);
13064 mpfr_from_real (m1, ra1, GMP_RNDN);
13065 mpfr_clear_flags ();
13066 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13067 /* Remquo is independent of the rounding mode, so pass
13068 inexact=0 to do_mpfr_ckconv(). */
13069 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13070 mpfr_clears (m0, m1, NULL);
13071 if (result_rem)
13073 /* MPFR calculates quo in the host's long so it may
13074 return more bits in quo than the target int can hold
13075 if sizeof(host long) > sizeof(target int). This can
13076 happen even for native compilers in LP64 mode. In
13077 these cases, modulo the quo value with the largest
13078 number that the target int can hold while leaving one
13079 bit for the sign. */
13080 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13081 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13083 /* Dereference the quo pointer argument. */
13084 arg_quo = build_fold_indirect_ref (arg_quo);
13085 /* Proceed iff a valid pointer type was passed in. */
13086 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13088 /* Set the value. */
13089 tree result_quo = fold_build2 (MODIFY_EXPR,
13090 TREE_TYPE (arg_quo), arg_quo,
13091 build_int_cst (NULL, integer_quo));
13092 TREE_SIDE_EFFECTS (result_quo) = 1;
13093 /* Combine the quo assignment with the rem. */
13094 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13095 result_quo, result_rem));
13100 return result;
13103 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13104 resulting value as a tree with type TYPE. The mpfr precision is
13105 set to the precision of TYPE. We assume that this mpfr function
13106 returns zero if the result could be calculated exactly within the
13107 requested precision. In addition, the integer pointer represented
13108 by ARG_SG will be dereferenced and set to the appropriate signgam
13109 (-1,1) value. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold lgamma_r: compute lgamma(ARG), store the sign of gamma(ARG)
   (+/-1) through the int pointer ARG_SG, return the combined expression.  */
13111 static tree
13112 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13114 tree result = NULL_TREE;
13116 STRIP_NOPS (arg);
13118 /* To proceed, MPFR must exactly represent the target floating point
13119 format, which only happens when the target base equals two. Also
13120 verify ARG is a constant and that ARG_SG is an int pointer. */
13121 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13122 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13123 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13124 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13126 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13128 /* In addition to NaN and Inf, the argument cannot be zero or a
13129 negative integer. */
13130 if (real_isfinite (ra)
13131 && ra->cl != rvc_zero
13132 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13134 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13135 const int prec = fmt->p;
13136 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13137 int inexact, sg;
13138 mpfr_t m;
13139 tree result_lg;
13141 mpfr_init2 (m, prec);
13142 mpfr_from_real (m, ra, GMP_RNDN);
13143 mpfr_clear_flags ();
13144 inexact = mpfr_lgamma (m, &sg, m, rnd);
13145 result_lg = do_mpfr_ckconv (m, type, inexact);
13146 mpfr_clear (m);
13147 if (result_lg)
13149 tree result_sg;
13151 /* Dereference the arg_sg pointer argument. */
13152 arg_sg = build_fold_indirect_ref (arg_sg);
13153 /* Assign the signgam value into *arg_sg. */
13154 result_sg = fold_build2 (MODIFY_EXPR,
13155 TREE_TYPE (arg_sg), arg_sg,
13156 build_int_cst (NULL, sg));
13157 TREE_SIDE_EFFECTS (result_sg) = 1;
13158 /* Combine the signgam assignment with the lgamma result. */
13159 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13160 result_sg, result_lg));
13165 return result;
13168 #ifdef HAVE_mpc
13169 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
13170 function FUNC on it and return the resulting value as a tree with
13171 type TYPE. The mpfr precision is set to the precision of TYPE. We
13172 assume that function FUNC returns zero if the result could be
13173 calculated exactly within the requested precision. */
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold a one-argument MPC (complex) function on a COMPLEX_CST ARG
   with finite real parts; return NULL_TREE when folding is not possible.  */
13175 static tree
13176 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
13178 tree result = NULL_TREE;
13180 STRIP_NOPS (arg);
13182 /* To proceed, MPFR must exactly represent the target floating point
13183 format, which only happens when the target base equals two. */
13184 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
13185 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
13186 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
13188 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
13189 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
13191 if (real_isfinite (re) && real_isfinite (im))
13193 const struct real_format *const fmt =
13194 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13195 const int prec = fmt->p;
13196 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13197 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13198 int inexact;
13199 mpc_t m;
13201 mpc_init2 (m, prec);
13202 mpfr_from_real (mpc_realref(m), re, rnd);
13203 mpfr_from_real (mpc_imagref(m), im, rnd);
13204 mpfr_clear_flags ();
13205 inexact = func (m, m, crnd);
13206 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
13207 mpc_clear (m);
13211 return result;
13214 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
13215 mpc function FUNC on it and return the resulting value as a tree
13216 with type TYPE. The mpfr precision is set to the precision of
13217 TYPE. We assume that function FUNC returns zero if the result
13218 could be calculated exactly within the requested precision. If
13219 DO_NONFINITE is true, then fold expressions containing Inf or NaN
13220 in the arguments and/or results. */
13222 #ifdef HAVE_mpc
/* NOTE(review): lossy extraction — brace/blank lines dropped, fused line
   numbers prefix each line.  Tokens left byte-identical; comments only.  */
/* Constant-fold a two-argument MPC (complex) function on COMPLEX_CST args.
   DO_NONFINITE also folds Inf/NaN inputs and results (passed through to
   do_mpc_ckconv as force_convert).  Non-static: used outside this file.  */
13223 tree
13224 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
13225 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
13227 tree result = NULL_TREE;
13229 STRIP_NOPS (arg0);
13230 STRIP_NOPS (arg1);
13232 /* To proceed, MPFR must exactly represent the target floating point
13233 format, which only happens when the target base equals two. */
13234 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
13235 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
13236 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
13237 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
13238 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
13240 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
13241 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
13242 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
13243 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
13245 if (do_nonfinite
13246 || (real_isfinite (re0) && real_isfinite (im0)
13247 && real_isfinite (re1) && real_isfinite (im1)))
13249 const struct real_format *const fmt =
13250 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
13251 const int prec = fmt->p;
13252 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
13253 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
13254 int inexact;
13255 mpc_t m0, m1;
13257 mpc_init2 (m0, prec);
13258 mpc_init2 (m1, prec);
13259 mpfr_from_real (mpc_realref(m0), re0, rnd);
13260 mpfr_from_real (mpc_imagref(m0), im0, rnd);
13261 mpfr_from_real (mpc_realref(m1), re1, rnd);
13262 mpfr_from_real (mpc_imagref(m1), im1, rnd);
13263 mpfr_clear_flags ();
13264 inexact = func (m0, m0, m1, crnd);
13265 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
13266 mpc_clear (m0);
13267 mpc_clear (m1);
13271 return result;
13273 # endif
13274 #endif /* HAVE_mpc */
13276 /* FIXME tuples.
13277 The functions below provide an alternate interface for folding
13278 builtin function calls presented as GIMPLE_CALL statements rather
13279 than as CALL_EXPRs. The folded result is still expressed as a
13280 tree. There is too much code duplication in the handling of
13281 varargs functions, and a more intrusive re-factoring would permit
13282 better sharing of code between the tree and statement-based
13283 versions of these functions. */
13285 /* Construct a new CALL_EXPR using the tail of the argument list of STMT
13286 along with N new arguments specified as the "..." parameters. SKIP
13287 is the number of arguments in STMT to be omitted. This function is used
13288 to do varargs-to-varargs transformations. */
13290 static tree
13291 gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
13293 int oldnargs = gimple_call_num_args (stmt);
13294 int nargs = oldnargs - skip + n;
13295 tree fntype = TREE_TYPE (fndecl);
13296 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
13297 tree *buffer;
13298 int i, j;
13299 va_list ap;
13300 location_t loc = gimple_location (stmt);
13302 buffer = XALLOCAVEC (tree, nargs);
13303 va_start (ap, n);
13304 for (i = 0; i < n; i++)
13305 buffer[i] = va_arg (ap, tree);
13306 va_end (ap);
13307 for (j = skip; j < oldnargs; j++, i++)
13308 buffer[i] = gimple_call_arg (stmt, j);
13310 return fold (build_call_array_loc (loc, TREE_TYPE (fntype), fn, nargs, buffer));
13313 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
13314 a normal call should be emitted rather than expanding the function
13315 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13317 static tree
13318 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
13320 tree dest, size, len, fn, fmt, flag;
13321 const char *fmt_str;
13322 int nargs = gimple_call_num_args (stmt);
13324 /* Verify the required arguments in the original call. */
13325 if (nargs < 4)
13326 return NULL_TREE;
13327 dest = gimple_call_arg (stmt, 0);
13328 if (!validate_arg (dest, POINTER_TYPE))
13329 return NULL_TREE;
13330 flag = gimple_call_arg (stmt, 1);
13331 if (!validate_arg (flag, INTEGER_TYPE))
13332 return NULL_TREE;
13333 size = gimple_call_arg (stmt, 2);
13334 if (!validate_arg (size, INTEGER_TYPE))
13335 return NULL_TREE;
13336 fmt = gimple_call_arg (stmt, 3);
13337 if (!validate_arg (fmt, POINTER_TYPE))
13338 return NULL_TREE;
13340 if (! host_integerp (size, 1))
13341 return NULL_TREE;
13343 len = NULL_TREE;
13345 if (!init_target_chars ())
13346 return NULL_TREE;
13348 /* Check whether the format is a literal string constant. */
13349 fmt_str = c_getstr (fmt);
13350 if (fmt_str != NULL)
13352 /* If the format doesn't contain % args or %%, we know the size. */
13353 if (strchr (fmt_str, target_percent) == 0)
13355 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13356 len = build_int_cstu (size_type_node, strlen (fmt_str));
13358 /* If the format is "%s" and first ... argument is a string literal,
13359 we know the size too. */
13360 else if (fcode == BUILT_IN_SPRINTF_CHK
13361 && strcmp (fmt_str, target_percent_s) == 0)
13363 tree arg;
13365 if (nargs == 5)
13367 arg = gimple_call_arg (stmt, 4);
13368 if (validate_arg (arg, POINTER_TYPE))
13370 len = c_strlen (arg, 1);
13371 if (! len || ! host_integerp (len, 1))
13372 len = NULL_TREE;
13378 if (! integer_all_onesp (size))
13380 if (! len || ! tree_int_cst_lt (len, size))
13381 return NULL_TREE;
13384 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13385 or if format doesn't contain % chars or is "%s". */
13386 if (! integer_zerop (flag))
13388 if (fmt_str == NULL)
13389 return NULL_TREE;
13390 if (strchr (fmt_str, target_percent) != NULL
13391 && strcmp (fmt_str, target_percent_s))
13392 return NULL_TREE;
13395 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13396 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
13397 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
13398 if (!fn)
13399 return NULL_TREE;
13401 return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
13404 /* Fold a call STMT to {,v}snprintf. Return NULL_TREE if
13405 a normal call should be emitted rather than expanding the function
13406 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13407 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
13408 passed as second argument. */
13410 tree
13411 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
13412 enum built_in_function fcode)
13414 tree dest, size, len, fn, fmt, flag;
13415 const char *fmt_str;
13417 /* Verify the required arguments in the original call. */
13418 if (gimple_call_num_args (stmt) < 5)
13419 return NULL_TREE;
13420 dest = gimple_call_arg (stmt, 0);
13421 if (!validate_arg (dest, POINTER_TYPE))
13422 return NULL_TREE;
13423 len = gimple_call_arg (stmt, 1);
13424 if (!validate_arg (len, INTEGER_TYPE))
13425 return NULL_TREE;
13426 flag = gimple_call_arg (stmt, 2);
13427 if (!validate_arg (flag, INTEGER_TYPE))
13428 return NULL_TREE;
13429 size = gimple_call_arg (stmt, 3);
13430 if (!validate_arg (size, INTEGER_TYPE))
13431 return NULL_TREE;
13432 fmt = gimple_call_arg (stmt, 4);
13433 if (!validate_arg (fmt, POINTER_TYPE))
13434 return NULL_TREE;
13436 if (! host_integerp (size, 1))
13437 return NULL_TREE;
13439 if (! integer_all_onesp (size))
13441 if (! host_integerp (len, 1))
13443 /* If LEN is not constant, try MAXLEN too.
13444 For MAXLEN only allow optimizing into non-_ocs function
13445 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13446 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
13447 return NULL_TREE;
13449 else
13450 maxlen = len;
13452 if (tree_int_cst_lt (size, maxlen))
13453 return NULL_TREE;
13456 if (!init_target_chars ())
13457 return NULL_TREE;
13459 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13460 or if format doesn't contain % chars or is "%s". */
13461 if (! integer_zerop (flag))
13463 fmt_str = c_getstr (fmt);
13464 if (fmt_str == NULL)
13465 return NULL_TREE;
13466 if (strchr (fmt_str, target_percent) != NULL
13467 && strcmp (fmt_str, target_percent_s))
13468 return NULL_TREE;
13471 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13472 available. */
13473 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
13474 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
13475 if (!fn)
13476 return NULL_TREE;
13478 return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
13481 /* Builtins with folding operations that operate on "..." arguments
13482 need special handling; we need to store the arguments in a convenient
13483 data structure before attempting any folding. Fortunately there are
13484 only a few builtins that fall into this category. FNDECL is the
13485 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13486 result of the function call is ignored. */
13488 static tree
13489 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
13490 bool ignore ATTRIBUTE_UNUSED)
13492 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13493 tree ret = NULL_TREE;
13495 switch (fcode)
13497 case BUILT_IN_SPRINTF_CHK:
13498 case BUILT_IN_VSPRINTF_CHK:
13499 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
13500 break;
13502 case BUILT_IN_SNPRINTF_CHK:
13503 case BUILT_IN_VSNPRINTF_CHK:
13504 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
13506 default:
13507 break;
13509 if (ret)
13511 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13512 TREE_NO_WARNING (ret) = 1;
13513 return ret;
13515 return NULL_TREE;
13518 /* A wrapper function for builtin folding that prevents warnings for
13519 "statement without effect" and the like, caused by removing the
13520 call node earlier than the warning is generated. */
13522 tree
13523 fold_call_stmt (gimple stmt, bool ignore)
13525 tree ret = NULL_TREE;
13526 tree fndecl = gimple_call_fndecl (stmt);
13527 location_t loc = gimple_location (stmt);
13528 if (fndecl
13529 && TREE_CODE (fndecl) == FUNCTION_DECL
13530 && DECL_BUILT_IN (fndecl)
13531 && !gimple_call_va_arg_pack_p (stmt))
13533 int nargs = gimple_call_num_args (stmt);
13535 if (avoid_folding_inline_builtin (fndecl))
13536 return NULL_TREE;
13537 /* FIXME: Don't use a list in this interface. */
13538 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
13540 tree arglist = NULL_TREE;
13541 int i;
13542 for (i = nargs - 1; i >= 0; i--)
13543 arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
13544 return targetm.fold_builtin (fndecl, arglist, ignore);
13546 else
13548 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
13550 tree args[MAX_ARGS_TO_FOLD_BUILTIN];
13551 int i;
13552 for (i = 0; i < nargs; i++)
13553 args[i] = gimple_call_arg (stmt, i);
13554 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
13556 if (!ret)
13557 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
13558 if (ret)
13560 /* Propagate location information from original call to
13561 expansion of builtin. Otherwise things like
13562 maybe_emit_chk_warning, that operate on the expansion
13563 of a builtin, will use the wrong location information. */
13564 if (gimple_has_location (stmt))
13566 tree realret = ret;
13567 if (TREE_CODE (ret) == NOP_EXPR)
13568 realret = TREE_OPERAND (ret, 0);
13569 if (CAN_HAVE_LOCATION_P (realret)
13570 && !EXPR_HAS_LOCATION (realret))
13571 SET_EXPR_LOCATION (realret, loc);
13572 return realret;
13574 return ret;
13578 return NULL_TREE;
13581 /* Look up the function in built_in_decls that corresponds to DECL
13582 and set ASMSPEC as its user assembler name. DECL must be a
13583 function decl that declares a builtin. */
13585 void
13586 set_builtin_user_assembler_name (tree decl, const char *asmspec)
13588 tree builtin;
13589 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
13590 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
13591 && asmspec != 0);
13593 builtin = built_in_decls [DECL_FUNCTION_CODE (decl)];
13594 set_user_assembler_name (builtin, asmspec);
13595 switch (DECL_FUNCTION_CODE (decl))
13597 case BUILT_IN_MEMCPY:
13598 init_block_move_fn (asmspec);
13599 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
13600 break;
13601 case BUILT_IN_MEMSET:
13602 init_block_clear_fn (asmspec);
13603 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
13604 break;
13605 case BUILT_IN_MEMMOVE:
13606 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
13607 break;
13608 case BUILT_IN_MEMCMP:
13609 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
13610 break;
13611 case BUILT_IN_ABORT:
13612 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
13613 break;
13614 default:
13615 break;