1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
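/* Editorial note (illustrative, not from the original sources): because
   DEF_BUILTIN stringizes its first argument, an entry in builtins.def that
   ultimately expands to DEF_BUILTIN (BUILT_IN_MEMCPY, "memcpy", ...)
   contributes the string "BUILT_IN_MEMCPY" to built_in_names, indexed by
   the corresponding enum built_in_function value.  */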
/* Set up an array of _DECL trees; make sure each element is
   initialized to NULL_TREE.  */
71 tree built_in_decls[(int) END_BUILTINS];
/* Declarations used when constructing the builtin implicitly in the compiler.
   An entry may be NULL_TREE when the implicit use is invalid (for instance
   when the runtime is not required to implement the function call in all
   cases).  */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
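/* Illustrative example (editorial sketch; example_copy is a placeholder):
   in user code such as the function below, the callee's DECL_NAME is
   "__builtin_memcpy", so called_as_built_in returns true and the call is
   considered for inline expansion even when optimization is disabled.  */

static char *
example_copy (char *d, const char *s, __SIZE_TYPE__ n)
{
  return __builtin_memcpy (d, s, n);
}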
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
270 return 0;
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
273 return 0;
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
278 while (1)
280 switch (TREE_CODE (exp))
282 CASE_CONVERT:
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
285 return align;
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
289 break;
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
294 ALIGN. */
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
296 return align;
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
300 != 0)
301 max_align >>= 1;
303 exp = TREE_OPERAND (exp, 0);
304 break;
306 case ADDR_EXPR:
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
309 inner = max_align;
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
313 tree offset;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
319 if (bitpos)
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
325 the alignment. */
326 unsigned offset_bits
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
328 * BITS_PER_UNIT);
330 if (offset_bits)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
338 the alignment. */
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
341 * BITS_PER_UNIT);
343 if (offset_factor)
344 inner = MIN (inner, (offset_factor & -offset_factor));
346 else if (offset)
347 inner = MIN (inner, BITS_PER_UNIT);
349 if (DECL_P (exp))
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
354 #endif
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
358 else
359 align = MIN (align, inner);
360 return MIN (align, max_align);
362 default:
363 return align;
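/* Illustrative example (editorial sketch): for

     struct example_s { char c; int i; } example_obj;

   a pointer expression such as &example_obj.i reaches the ADDR_EXPR /
   COMPONENT_REF handling above and is typically reported as aligned to the
   int field's alignment (32 bits on common targets), capped by MAX_ALIGN.
   Note the early return of 0 above when TER information is unavailable
   (!optimize || !flag_tree_ter).  */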
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
384 tree
385 c_strlen (tree src, int only_value)
387 tree offset_node;
388 HOST_WIDE_INT offset;
389 int max;
390 const char *ptr;
392 STRIP_NOPS (src);
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
396 tree len1, len2;
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
401 return len1;
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
409 if (src == 0)
410 return NULL_TREE;
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
420 int i;
422 for (i = 0; i < max; i++)
423 if (ptr[i] == 0)
424 return NULL_TREE;
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
439 offset = 0;
440 else if (! host_integerp (offset_node, 0))
441 offset = -1;
442 else
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
446 runtime. */
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
455 return NULL_TREE;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
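/* Illustrative example (editorial sketch): for the string constant
   "foo\0bar", c_strlen returns ssize_int (3) when the offset into the
   string is known to be 0, ssize_int (1) when the offset is known to be 2,
   and NULL_TREE when the offset is not a compile-time constant, because
   the embedded zero byte makes the result depend on where the search
   starts.  */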
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
470 static const char *
471 c_getstr (tree src)
473 tree offset_node;
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
491 static rtx
492 c_readstr (const char *str, enum machine_mode mode)
494 HOST_WIDE_INT c[2];
495 HOST_WIDE_INT ch;
496 unsigned int i, j;
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 c[0] = 0;
501 c[1] = 0;
502 ch = 1;
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
505 j = i;
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
511 j *= BITS_PER_UNIT;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
514 if (ch)
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
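/* Illustrative example (editorial sketch): with 8-bit units,
   c_readstr ("abcd", SImode) packs the bytes so that the constant matches
   the string in target memory order: the value is 0x64636261 on a
   little-endian target and 0x61626364 on a big-endian one (assuming
   BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN).  */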
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
525 static int
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542 if (val != hostval)
543 return 1;
545 *p = hostval;
546 return 0;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
553 static tree
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
568 static rtx
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 int i;
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 #else
576 rtx tem;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
589 else
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
596 #endif
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
602 if (count > 0)
603 SETUP_FRAME_ADDRESSES ();
604 #endif
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 count--;
613 #endif
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 #endif
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
633 #else
634 return tem;
635 #endif
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
640 #else
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
644 #endif
645 return tem;
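/* Illustrative example (editorial sketch; the function names are
   placeholders): __builtin_return_address (0) is the count == 0 case above,
   which may use the soft frame pointer and still permits frame pointer
   elimination, while __builtin_frame_address (and any nonzero count) needs
   a stable chain of frames, so it forces the hard frame pointer and sets
   crtl->accesses_prior_frames.  */

static void *
example_caller_pc (void)
{
  return __builtin_return_address (0);
}

static void *
example_parent_frame (void)
{
  return __builtin_frame_address (1);
}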
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
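/* Editorial layout sketch of the __builtin_setjmp buffer as written above,
   in units of GET_MODE_SIZE (Pmode):

     word 0       frame pointer (targetm.builtin_setjmp_frame_value)
     word 1       address of RECEIVER_LABEL
     word 2 ...   stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below read
   and rewrite the same offsets.  */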
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
706 void
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
721 #endif
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
733 size_t i;
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 break;
741 if (i == ARRAY_SIZE (elim_regs))
742 #endif
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
750 #endif
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 else
756 #endif
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
760 else
761 #endif
762 { /* Nothing */ }
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
775 static void
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
799 else
800 #endif
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 else
821 #endif
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
846 if (JUMP_P (insn))
848 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
849 REG_NOTES (insn));
850 break;
852 else if (CALL_P (insn))
853 break;
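/* Illustrative example (editorial sketch, not part of the original file;
   the names below are placeholders): the builtin pair is meant for
   internal, exception-handling style control transfers between functions,
   with a five-word buffer and a mandatory second argument of 1 matching
   the gcc_assert above.  */

static void *example_buf[5];

static int
example_try (void)
{
  if (__builtin_setjmp (example_buf) == 0)
    return 0;                   /* direct return from setjmp */
  return 1;                     /* resumed here by example_throw */
}

static void
example_throw (void)
{
  /* The second argument must be 1; see the gcc_assert above.  */
  __builtin_longjmp (example_buf, 1);
}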
857 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
858 and the address of the save area. */
860 static rtx
861 expand_builtin_nonlocal_goto (tree exp)
863 tree t_label, t_save_area;
864 rtx r_label, r_save_area, r_fp, r_sp, insn;
866 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
867 return NULL_RTX;
869 t_label = CALL_EXPR_ARG (exp, 0);
870 t_save_area = CALL_EXPR_ARG (exp, 1);
872 r_label = expand_normal (t_label);
873 r_label = convert_memory_address (Pmode, r_label);
874 r_save_area = expand_normal (t_save_area);
875 r_save_area = convert_memory_address (Pmode, r_save_area);
876 r_fp = gen_rtx_MEM (Pmode, r_save_area);
877 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
878 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
880 crtl->has_nonlocal_goto = 1;
882 #ifdef HAVE_nonlocal_goto
883 /* ??? We no longer need to pass the static chain value, afaik. */
884 if (HAVE_nonlocal_goto)
885 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
886 else
887 #endif
889 r_label = copy_to_reg (r_label);
891 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
892 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
894 /* Restore frame pointer for containing function.
895 This sets the actual hard register used for the frame pointer
896 to the location of the function's incoming static chain info.
897 The non-local goto handler will then adjust it to contain the
898 proper value and reload the argument pointer, if needed. */
899 emit_move_insn (hard_frame_pointer_rtx, r_fp);
900 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
902 /* USE of hard_frame_pointer_rtx added for consistency;
903 not clear if really needed. */
904 emit_use (hard_frame_pointer_rtx);
905 emit_use (stack_pointer_rtx);
907 /* If the architecture is using a GP register, we must
908 conservatively assume that the target function makes use of it.
909 The prologue of functions with nonlocal gotos must therefore
910 initialize the GP register to the appropriate value, and we
911 must then make sure that this value is live at the point
912 of the jump. (Note that this doesn't necessarily apply
913 to targets with a nonlocal_goto pattern; they are free
914 to implement it in their own way. Note also that this is
915 a no-op if the GP register is a global invariant.) */
916 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
917 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
918 emit_use (pic_offset_table_rtx);
920 emit_indirect_jump (r_label);
923 /* Search backwards to the jump insn and mark it as a
924 non-local goto. */
925 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
927 if (JUMP_P (insn))
929 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
930 const0_rtx, REG_NOTES (insn));
931 break;
933 else if (CALL_P (insn))
934 break;
937 return const0_rtx;
940 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
941 (not all will be used on all machines) that was passed to __builtin_setjmp.
942 It updates the stack pointer in that block to correspond to the current
943 stack pointer. */
945 static void
946 expand_builtin_update_setjmp_buf (rtx buf_addr)
948 enum machine_mode sa_mode = Pmode;
949 rtx stack_save;
952 #ifdef HAVE_save_stack_nonlocal
953 if (HAVE_save_stack_nonlocal)
954 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
955 #endif
956 #ifdef STACK_SAVEAREA_MODE
957 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
958 #endif
960 stack_save
961 = gen_rtx_MEM (sa_mode,
962 memory_address
963 (sa_mode,
964 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
966 #ifdef HAVE_setjmp
967 if (HAVE_setjmp)
968 emit_insn (gen_setjmp ());
969 #endif
971 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
974 /* Expand a call to __builtin_prefetch. For a target that does not support
975 data prefetch, evaluate the memory address argument in case it has side
976 effects. */
978 static void
979 expand_builtin_prefetch (tree exp)
981 tree arg0, arg1, arg2;
982 int nargs;
983 rtx op0, op1, op2;
985 if (!validate_arglist (exp, POINTER_TYPE, 0))
986 return;
988 arg0 = CALL_EXPR_ARG (exp, 0);
990 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
991 zero (read) and argument 2 (locality) defaults to 3 (high degree of
992 locality). */
993 nargs = call_expr_nargs (exp);
994 if (nargs > 1)
995 arg1 = CALL_EXPR_ARG (exp, 1);
996 else
997 arg1 = integer_zero_node;
998 if (nargs > 2)
999 arg2 = CALL_EXPR_ARG (exp, 2);
1000 else
1001 arg2 = build_int_cst (NULL_TREE, 3);
1003 /* Argument 0 is an address. */
1004 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1006 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1007 if (TREE_CODE (arg1) != INTEGER_CST)
1009 error ("second argument to %<__builtin_prefetch%> must be a constant");
1010 arg1 = integer_zero_node;
1012 op1 = expand_normal (arg1);
1013 /* Argument 1 must be either zero or one. */
1014 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1016 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1017 " using zero");
1018 op1 = const0_rtx;
1021 /* Argument 2 (locality) must be a compile-time constant int. */
1022 if (TREE_CODE (arg2) != INTEGER_CST)
1024 error ("third argument to %<__builtin_prefetch%> must be a constant");
1025 arg2 = integer_zero_node;
1027 op2 = expand_normal (arg2);
1028 /* Argument 2 must be 0, 1, 2, or 3. */
1029 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1031 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1032 op2 = const0_rtx;
1035 #ifdef HAVE_prefetch
1036 if (HAVE_prefetch)
1038 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1039 (op0,
1040 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1041 || (GET_MODE (op0) != Pmode))
1043 op0 = convert_memory_address (Pmode, op0);
1044 op0 = force_reg (Pmode, op0);
1046 emit_insn (gen_prefetch (op0, op1, op2));
1048 #endif
1050 /* Don't do anything with direct references to volatile memory, but
1051 generate code to handle other side effects. */
1052 if (!MEM_P (op0) && side_effects_p (op0))
1053 emit_insn (op0);
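/* Illustrative example (editorial sketch; example_prefetch is a
   placeholder): with the defaulting performed above, the two calls below
   are equivalent; a non-constant second or third argument is rejected with
   an error, and an out-of-range constant only draws a warning and falls
   back to zero.  */

static void
example_prefetch (const char *p)
{
  __builtin_prefetch (p);       /* rw = 0 (read), locality = 3 */
  __builtin_prefetch (p, 0, 3); /* same thing, spelled out */
}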
1056 /* Get a MEM rtx for expression EXP which is the address of an operand
1057 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1058 the maximum length of the block of memory that might be accessed or
1059 NULL if unknown. */
1061 static rtx
1062 get_memory_rtx (tree exp, tree len)
1064 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1065 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1067 /* Get an expression we can use to find the attributes to assign to MEM.
1068 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1069 we can. First remove any nops. */
1070 while (CONVERT_EXPR_P (exp)
1071 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1072 exp = TREE_OPERAND (exp, 0);
1074 if (TREE_CODE (exp) == ADDR_EXPR)
1075 exp = TREE_OPERAND (exp, 0);
1076 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1077 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1078 else
1079 exp = NULL;
1081 /* Honor attributes derived from exp, except for the alias set
1082 (as builtin stringops may alias with anything) and the size
1083 (as stringops may access multiple array elements). */
1084 if (exp)
1086 set_mem_attributes (mem, exp, 0);
1088 /* Allow the string and memory builtins to overflow from one
1089 field into another, see http://gcc.gnu.org/PR23561.
1090 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1091 memory accessed by the string or memory builtin will fit
1092 within the field. */
1093 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1095 tree mem_expr = MEM_EXPR (mem);
1096 HOST_WIDE_INT offset = -1, length = -1;
1097 tree inner = exp;
1099 while (TREE_CODE (inner) == ARRAY_REF
1100 || CONVERT_EXPR_P (inner)
1101 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1102 || TREE_CODE (inner) == SAVE_EXPR)
1103 inner = TREE_OPERAND (inner, 0);
1105 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1107 if (MEM_OFFSET (mem)
1108 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1109 offset = INTVAL (MEM_OFFSET (mem));
1111 if (offset >= 0 && len && host_integerp (len, 0))
1112 length = tree_low_cst (len, 0);
1114 while (TREE_CODE (inner) == COMPONENT_REF)
1116 tree field = TREE_OPERAND (inner, 1);
1117 gcc_assert (! DECL_BIT_FIELD (field));
1118 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1119 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1121 if (length >= 0
1122 && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1123 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1125 HOST_WIDE_INT size
1126 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1127 /* If we can prove the memory starting at XEXP (mem, 0)
1128 and ending at XEXP (mem, 0) + LENGTH will fit into
1129 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1130 if (offset <= size
1131 && length <= size
1132 && offset + length <= size)
1133 break;
1136 if (offset >= 0
1137 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1138 offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1139 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1140 / BITS_PER_UNIT;
1141 else
1143 offset = -1;
1144 length = -1;
1147 mem_expr = TREE_OPERAND (mem_expr, 0);
1148 inner = TREE_OPERAND (inner, 0);
1151 if (mem_expr == NULL)
1152 offset = -1;
1153 if (mem_expr != MEM_EXPR (mem))
1155 set_mem_expr (mem, mem_expr);
1156 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1159 set_mem_alias_set (mem, 0);
1160 set_mem_size (mem, NULL_RTX);
1163 return mem;
1166 /* Built-in functions to perform an untyped call and return. */
1168 /* For each register that may be used for calling a function, this
1169 gives a mode used to copy the register's value. VOIDmode indicates
1170 the register is not used for calling a function. If the machine
1171 has register windows, this gives only the outbound registers.
1172 INCOMING_REGNO gives the corresponding inbound register. */
1173 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1175 /* For each register that may be used for returning values, this gives
1176 a mode used to copy the register's value. VOIDmode indicates the
1177 register is not used for returning values. If the machine has
1178 register windows, this gives only the outbound registers.
1179 INCOMING_REGNO gives the corresponding inbound register. */
1180 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1182 /* For each register that may be used for calling a function, this
1183 gives the offset of that register into the block returned by
1184 __builtin_apply_args. 0 indicates that the register is not
1185 used for calling a function. */
1186 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1188 /* Return the size required for the block returned by __builtin_apply_args,
1189 and initialize apply_args_mode. */
1191 static int
1192 apply_args_size (void)
1194 static int size = -1;
1195 int align;
1196 unsigned int regno;
1197 enum machine_mode mode;
1199 /* The values computed by this function never change. */
1200 if (size < 0)
1202 /* The first value is the incoming arg-pointer. */
1203 size = GET_MODE_SIZE (Pmode);
1205 /* The second value is the structure value address unless this is
1206 passed as an "invisible" first argument. */
1207 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1208 size += GET_MODE_SIZE (Pmode);
1210 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1211 if (FUNCTION_ARG_REGNO_P (regno))
1213 mode = reg_raw_mode[regno];
1215 gcc_assert (mode != VOIDmode);
1217 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1218 if (size % align != 0)
1219 size = CEIL (size, align) * align;
1220 apply_args_reg_offset[regno] = size;
1221 size += GET_MODE_SIZE (mode);
1222 apply_args_mode[regno] = mode;
1224 else
1226 apply_args_mode[regno] = VOIDmode;
1227 apply_args_reg_offset[regno] = 0;
1230 return size;
1233 /* Return the size required for the block returned by __builtin_apply,
1234 and initialize apply_result_mode. */
1236 static int
1237 apply_result_size (void)
1239 static int size = -1;
1240 int align, regno;
1241 enum machine_mode mode;
1243 /* The values computed by this function never change. */
1244 if (size < 0)
1246 size = 0;
1248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1249 if (FUNCTION_VALUE_REGNO_P (regno))
1251 mode = reg_raw_mode[regno];
1253 gcc_assert (mode != VOIDmode);
1255 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1256 if (size % align != 0)
1257 size = CEIL (size, align) * align;
1258 size += GET_MODE_SIZE (mode);
1259 apply_result_mode[regno] = mode;
1261 else
1262 apply_result_mode[regno] = VOIDmode;
1264 /* Allow targets that use untyped_call and untyped_return to override
1265 the size so that machine-specific information can be stored here. */
1266 #ifdef APPLY_RESULT_SIZE
1267 size = APPLY_RESULT_SIZE;
1268 #endif
1270 return size;
1273 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1274 /* Create a vector describing the result block RESULT. If SAVEP is true,
1275 the result block is used to save the values; otherwise it is used to
1276 restore the values. */
1278 static rtx
1279 result_vector (int savep, rtx result)
1281 int regno, size, align, nelts;
1282 enum machine_mode mode;
1283 rtx reg, mem;
1284 rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1286 size = nelts = 0;
1287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1288 if ((mode = apply_result_mode[regno]) != VOIDmode)
1290 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1291 if (size % align != 0)
1292 size = CEIL (size, align) * align;
1293 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1294 mem = adjust_address (result, mode, size);
1295 savevec[nelts++] = (savep
1296 ? gen_rtx_SET (VOIDmode, mem, reg)
1297 : gen_rtx_SET (VOIDmode, reg, mem));
1298 size += GET_MODE_SIZE (mode);
1300 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1302 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1304 /* Save the state required to perform an untyped call with the same
1305 arguments as were passed to the current function. */
1307 static rtx
1308 expand_builtin_apply_args_1 (void)
1310 rtx registers, tem;
1311 int size, align, regno;
1312 enum machine_mode mode;
1313 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1315 /* Create a block where the arg-pointer, structure value address,
1316 and argument registers can be saved. */
1317 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1319 /* Walk past the arg-pointer and structure value address. */
1320 size = GET_MODE_SIZE (Pmode);
1321 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1322 size += GET_MODE_SIZE (Pmode);
1324 /* Save each register used in calling a function to the block. */
1325 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1326 if ((mode = apply_args_mode[regno]) != VOIDmode)
1328 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1329 if (size % align != 0)
1330 size = CEIL (size, align) * align;
1332 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1334 emit_move_insn (adjust_address (registers, mode, size), tem);
1335 size += GET_MODE_SIZE (mode);
1338 /* Save the arg pointer to the block. */
1339 tem = copy_to_reg (virtual_incoming_args_rtx);
1340 #ifdef STACK_GROWS_DOWNWARD
1341 /* We need the pointer as the caller actually passed them to us, not
1342 as we might have pretended they were passed. Make sure it's a valid
1343 operand, as emit_move_insn isn't expected to handle a PLUS. */
tem = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
                     NULL_RTX);
1347 #endif
1348 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1350 size = GET_MODE_SIZE (Pmode);
1352 /* Save the structure value address unless this is passed as an
1353 "invisible" first argument. */
1354 if (struct_incoming_value)
1356 emit_move_insn (adjust_address (registers, Pmode, size),
1357 copy_to_reg (struct_incoming_value));
1358 size += GET_MODE_SIZE (Pmode);
1361 /* Return the address of the block. */
1362 return copy_addr_to_reg (XEXP (registers, 0));
1365 /* __builtin_apply_args returns block of memory allocated on
1366 the stack into which is stored the arg pointer, structure
1367 value address, static chain, and all the registers that might
1368 possibly be used in performing a function call. The code is
1369 moved to the start of the function so the incoming values are
1370 saved. */
1372 static rtx
1373 expand_builtin_apply_args (void)
1375 /* Don't do __builtin_apply_args more than once in a function.
1376 Save the result of the first call and reuse it. */
1377 if (apply_args_value != 0)
1378 return apply_args_value;
1380 /* When this function is called, it means that registers must be
1381 saved on entry to this function. So we migrate the
1382 call to the first insn of this function. */
1383 rtx temp;
1384 rtx seq;
1386 start_sequence ();
1387 temp = expand_builtin_apply_args_1 ();
1388 seq = get_insns ();
1389 end_sequence ();
1391 apply_args_value = temp;
1393 /* Put the insns after the NOTE that starts the function.
1394 If this is inside a start_sequence, make the outer-level insn
1395 chain current, so the code is placed at the start of the
1396 function. */
1397 push_topmost_sequence ();
1398 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1399 pop_topmost_sequence ();
1400 return temp;
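/* Illustrative example (editorial sketch): a transparent forwarding wrapper
   built on the untyped-call machinery.  The names and the 64-byte argument
   block size are placeholders chosen for illustration, not values computed
   by this file.  __builtin_apply_args is expanded by the code above;
   __builtin_apply and __builtin_return correspond to expand_builtin_apply
   and expand_builtin_return below.  */

extern void example_target (int, double);

static void
example_forwarder (int a, double b)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) example_target, args, 64);
  __builtin_return (result);
}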
1404 /* Perform an untyped call and save the state required to perform an
1405 untyped return of whatever value was returned by the given function. */
1407 static rtx
1408 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1410 int size, align, regno;
1411 enum machine_mode mode;
1412 rtx incoming_args, result, reg, dest, src, call_insn;
1413 rtx old_stack_level = 0;
1414 rtx call_fusage = 0;
1415 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1417 arguments = convert_memory_address (Pmode, arguments);
1419 /* Create a block where the return registers can be saved. */
1420 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1422 /* Fetch the arg pointer from the ARGUMENTS block. */
1423 incoming_args = gen_reg_rtx (Pmode);
1424 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1425 #ifndef STACK_GROWS_DOWNWARD
1426 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1427 incoming_args, 0, OPTAB_LIB_WIDEN);
1428 #endif
1430 /* Push a new argument block and copy the arguments. Do not allow
1431 the (potential) memcpy call below to interfere with our stack
1432 manipulations. */
1433 do_pending_stack_adjust ();
1434 NO_DEFER_POP;
1436 /* Save the stack with nonlocal if available. */
1437 #ifdef HAVE_save_stack_nonlocal
1438 if (HAVE_save_stack_nonlocal)
1439 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1440 else
1441 #endif
1442 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1444 /* Allocate a block of memory onto the stack and copy the memory
1445 arguments to the outgoing arguments address. */
1446 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1447 dest = virtual_outgoing_args_rtx;
1448 #ifndef STACK_GROWS_DOWNWARD
1449 if (GET_CODE (argsize) == CONST_INT)
1450 dest = plus_constant (dest, -INTVAL (argsize));
1451 else
1452 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1453 #endif
1454 dest = gen_rtx_MEM (BLKmode, dest);
1455 set_mem_align (dest, PARM_BOUNDARY);
1456 src = gen_rtx_MEM (BLKmode, incoming_args);
1457 set_mem_align (src, PARM_BOUNDARY);
1458 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1460 /* Refer to the argument block. */
1461 apply_args_size ();
1462 arguments = gen_rtx_MEM (BLKmode, arguments);
1463 set_mem_align (arguments, PARM_BOUNDARY);
1465 /* Walk past the arg-pointer and structure value address. */
1466 size = GET_MODE_SIZE (Pmode);
1467 if (struct_value)
1468 size += GET_MODE_SIZE (Pmode);
1470 /* Restore each of the registers previously saved. Make USE insns
1471 for each of these registers for use in making the call. */
1472 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1473 if ((mode = apply_args_mode[regno]) != VOIDmode)
1475 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1476 if (size % align != 0)
1477 size = CEIL (size, align) * align;
1478 reg = gen_rtx_REG (mode, regno);
1479 emit_move_insn (reg, adjust_address (arguments, mode, size));
1480 use_reg (&call_fusage, reg);
1481 size += GET_MODE_SIZE (mode);
1484 /* Restore the structure value address unless this is passed as an
1485 "invisible" first argument. */
1486 size = GET_MODE_SIZE (Pmode);
1487 if (struct_value)
1489 rtx value = gen_reg_rtx (Pmode);
1490 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1491 emit_move_insn (struct_value, value);
1492 if (REG_P (struct_value))
1493 use_reg (&call_fusage, struct_value);
1494 size += GET_MODE_SIZE (Pmode);
1497 /* All arguments and registers used for the call are set up by now! */
1498 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1500 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1501 and we don't want to load it into a register as an optimization,
1502 because prepare_call_address already did it if it should be done. */
1503 if (GET_CODE (function) != SYMBOL_REF)
1504 function = memory_address (FUNCTION_MODE, function);
1506 /* Generate the actual call instruction and save the return value. */
1507 #ifdef HAVE_untyped_call
1508 if (HAVE_untyped_call)
1509 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1510 result, result_vector (1, result)));
1511 else
1512 #endif
1513 #ifdef HAVE_call_value
1514 if (HAVE_call_value)
1516 rtx valreg = 0;
1518 /* Locate the unique return register. It is not possible to
1519 express a call that sets more than one return register using
1520 call_value; use untyped_call for that. In fact, untyped_call
1521 only needs to save the return registers in the given block. */
1522 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1523 if ((mode = apply_result_mode[regno]) != VOIDmode)
1525 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1527 valreg = gen_rtx_REG (mode, regno);
1530 emit_call_insn (GEN_CALL_VALUE (valreg,
1531 gen_rtx_MEM (FUNCTION_MODE, function),
1532 const0_rtx, NULL_RTX, const0_rtx));
1534 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1536 else
1537 #endif
1538 gcc_unreachable ();
1540 /* Find the CALL insn we just emitted, and attach the register usage
1541 information. */
1542 call_insn = last_call_insn ();
1543 add_function_usage_to (call_insn, call_fusage);
1545 /* Restore the stack. */
1546 #ifdef HAVE_save_stack_nonlocal
1547 if (HAVE_save_stack_nonlocal)
1548 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1549 else
1550 #endif
1551 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1553 OK_DEFER_POP;
1555 /* Return the address of the result block. */
1556 result = copy_addr_to_reg (XEXP (result, 0));
1557 return convert_memory_address (ptr_mode, result);
1560 /* Perform an untyped return. */
1562 static void
1563 expand_builtin_return (rtx result)
1565 int size, align, regno;
1566 enum machine_mode mode;
1567 rtx reg;
1568 rtx call_fusage = 0;
1570 result = convert_memory_address (Pmode, result);
1572 apply_result_size ();
1573 result = gen_rtx_MEM (BLKmode, result);
1575 #ifdef HAVE_untyped_return
1576 if (HAVE_untyped_return)
1578 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1579 emit_barrier ();
1580 return;
1582 #endif
1584 /* Restore the return value and note that each value is used. */
1585 size = 0;
1586 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1587 if ((mode = apply_result_mode[regno]) != VOIDmode)
1589 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1590 if (size % align != 0)
1591 size = CEIL (size, align) * align;
1592 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1593 emit_move_insn (reg, adjust_address (result, mode, size));
1595 push_to_sequence (call_fusage);
1596 emit_use (reg);
1597 call_fusage = get_insns ();
1598 end_sequence ();
1599 size += GET_MODE_SIZE (mode);
1602 /* Put the USE insns before the return. */
1603 emit_insn (call_fusage);
/* Return whatever value was restored by jumping directly to the end
   of the function.  */
1607 expand_naked_return ();
1610 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1612 static enum type_class
1613 type_to_class (tree type)
1615 switch (TREE_CODE (type))
1617 case VOID_TYPE: return void_type_class;
1618 case INTEGER_TYPE: return integer_type_class;
1619 case ENUMERAL_TYPE: return enumeral_type_class;
1620 case BOOLEAN_TYPE: return boolean_type_class;
1621 case POINTER_TYPE: return pointer_type_class;
1622 case REFERENCE_TYPE: return reference_type_class;
1623 case OFFSET_TYPE: return offset_type_class;
1624 case REAL_TYPE: return real_type_class;
1625 case COMPLEX_TYPE: return complex_type_class;
1626 case FUNCTION_TYPE: return function_type_class;
1627 case METHOD_TYPE: return method_type_class;
1628 case RECORD_TYPE: return record_type_class;
1629 case UNION_TYPE:
1630 case QUAL_UNION_TYPE: return union_type_class;
1631 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1632 ? string_type_class : array_type_class);
1633 case LANG_TYPE: return lang_type_class;
1634 default: return no_type_class;
1638 /* Expand a call EXP to __builtin_classify_type. */
1640 static rtx
1641 expand_builtin_classify_type (tree exp)
1643 if (call_expr_nargs (exp))
1644 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1645 return GEN_INT (no_type_class);
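/* Illustrative example (editorial sketch): __builtin_classify_type follows
   the mapping in type_to_class above, so __builtin_classify_type (1.5)
   evaluates to real_type_class and __builtin_classify_type ((void *) 0) to
   pointer_type_class, while a call with no arguments yields
   no_type_class.  */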
1648 /* This helper macro, meant to be used in mathfn_built_in below,
1649 determines which among a set of three builtin math functions is
1650 appropriate for a given type mode. The `F' and `L' cases are
1651 automatically generated from the `double' case. */
1652 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1653 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1654 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1655 fcodel = BUILT_IN_MATHFN##L ; break;
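/* For illustration, CASE_MATHFN (BUILT_IN_SIN) expands to
     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF; fcodel = BUILT_IN_SINL;
       break;
   so mathfn_built_in_1 below can then pick the decl matching TYPE.  */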
1656 /* Similar to above, but appends _R after any F/L suffix. */
1657 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1658 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1659 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1660 fcodel = BUILT_IN_MATHFN##L_R ; break;
1662 /* Return the mathematical function equivalent to FN but operating directly
1663 on TYPE, if available. If IMPLICIT is true find the function in
1664 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1665 can't do the conversion, return zero. */
1667 static tree
1668 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1670 tree const *const fn_arr
1671 = implicit ? implicit_built_in_decls : built_in_decls;
1672 enum built_in_function fcode, fcodef, fcodel;
1674 switch (fn)
1676 CASE_MATHFN (BUILT_IN_ACOS)
1677 CASE_MATHFN (BUILT_IN_ACOSH)
1678 CASE_MATHFN (BUILT_IN_ASIN)
1679 CASE_MATHFN (BUILT_IN_ASINH)
1680 CASE_MATHFN (BUILT_IN_ATAN)
1681 CASE_MATHFN (BUILT_IN_ATAN2)
1682 CASE_MATHFN (BUILT_IN_ATANH)
1683 CASE_MATHFN (BUILT_IN_CBRT)
1684 CASE_MATHFN (BUILT_IN_CEIL)
1685 CASE_MATHFN (BUILT_IN_CEXPI)
1686 CASE_MATHFN (BUILT_IN_COPYSIGN)
1687 CASE_MATHFN (BUILT_IN_COS)
1688 CASE_MATHFN (BUILT_IN_COSH)
1689 CASE_MATHFN (BUILT_IN_DREM)
1690 CASE_MATHFN (BUILT_IN_ERF)
1691 CASE_MATHFN (BUILT_IN_ERFC)
1692 CASE_MATHFN (BUILT_IN_EXP)
1693 CASE_MATHFN (BUILT_IN_EXP10)
1694 CASE_MATHFN (BUILT_IN_EXP2)
1695 CASE_MATHFN (BUILT_IN_EXPM1)
1696 CASE_MATHFN (BUILT_IN_FABS)
1697 CASE_MATHFN (BUILT_IN_FDIM)
1698 CASE_MATHFN (BUILT_IN_FLOOR)
1699 CASE_MATHFN (BUILT_IN_FMA)
1700 CASE_MATHFN (BUILT_IN_FMAX)
1701 CASE_MATHFN (BUILT_IN_FMIN)
1702 CASE_MATHFN (BUILT_IN_FMOD)
1703 CASE_MATHFN (BUILT_IN_FREXP)
1704 CASE_MATHFN (BUILT_IN_GAMMA)
1705 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1706 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1707 CASE_MATHFN (BUILT_IN_HYPOT)
1708 CASE_MATHFN (BUILT_IN_ILOGB)
1709 CASE_MATHFN (BUILT_IN_INF)
1710 CASE_MATHFN (BUILT_IN_ISINF)
1711 CASE_MATHFN (BUILT_IN_J0)
1712 CASE_MATHFN (BUILT_IN_J1)
1713 CASE_MATHFN (BUILT_IN_JN)
1714 CASE_MATHFN (BUILT_IN_LCEIL)
1715 CASE_MATHFN (BUILT_IN_LDEXP)
1716 CASE_MATHFN (BUILT_IN_LFLOOR)
1717 CASE_MATHFN (BUILT_IN_LGAMMA)
1718 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1719 CASE_MATHFN (BUILT_IN_LLCEIL)
1720 CASE_MATHFN (BUILT_IN_LLFLOOR)
1721 CASE_MATHFN (BUILT_IN_LLRINT)
1722 CASE_MATHFN (BUILT_IN_LLROUND)
1723 CASE_MATHFN (BUILT_IN_LOG)
1724 CASE_MATHFN (BUILT_IN_LOG10)
1725 CASE_MATHFN (BUILT_IN_LOG1P)
1726 CASE_MATHFN (BUILT_IN_LOG2)
1727 CASE_MATHFN (BUILT_IN_LOGB)
1728 CASE_MATHFN (BUILT_IN_LRINT)
1729 CASE_MATHFN (BUILT_IN_LROUND)
1730 CASE_MATHFN (BUILT_IN_MODF)
1731 CASE_MATHFN (BUILT_IN_NAN)
1732 CASE_MATHFN (BUILT_IN_NANS)
1733 CASE_MATHFN (BUILT_IN_NEARBYINT)
1734 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1735 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1736 CASE_MATHFN (BUILT_IN_POW)
1737 CASE_MATHFN (BUILT_IN_POWI)
1738 CASE_MATHFN (BUILT_IN_POW10)
1739 CASE_MATHFN (BUILT_IN_REMAINDER)
1740 CASE_MATHFN (BUILT_IN_REMQUO)
1741 CASE_MATHFN (BUILT_IN_RINT)
1742 CASE_MATHFN (BUILT_IN_ROUND)
1743 CASE_MATHFN (BUILT_IN_SCALB)
1744 CASE_MATHFN (BUILT_IN_SCALBLN)
1745 CASE_MATHFN (BUILT_IN_SCALBN)
1746 CASE_MATHFN (BUILT_IN_SIGNBIT)
1747 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1748 CASE_MATHFN (BUILT_IN_SIN)
1749 CASE_MATHFN (BUILT_IN_SINCOS)
1750 CASE_MATHFN (BUILT_IN_SINH)
1751 CASE_MATHFN (BUILT_IN_SQRT)
1752 CASE_MATHFN (BUILT_IN_TAN)
1753 CASE_MATHFN (BUILT_IN_TANH)
1754 CASE_MATHFN (BUILT_IN_TGAMMA)
1755 CASE_MATHFN (BUILT_IN_TRUNC)
1756 CASE_MATHFN (BUILT_IN_Y0)
1757 CASE_MATHFN (BUILT_IN_Y1)
1758 CASE_MATHFN (BUILT_IN_YN)
1760 default:
1761 return NULL_TREE;
1764 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1765 return fn_arr[fcode];
1766 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1767 return fn_arr[fcodef];
1768 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1769 return fn_arr[fcodel];
1770 else
1771 return NULL_TREE;
1774 /* Like mathfn_built_in_1(), but always use the implicit array. */
1776 tree
1777 mathfn_built_in (tree type, enum built_in_function fn)
1779 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1782 /* If errno must be maintained, expand the RTL to check if the result,
1783 TARGET, of a built-in function call, EXP, is NaN, and if so set
1784 errno to EDOM. */
1786 static void
1787 expand_errno_check (tree exp, rtx target)
1789 rtx lab = gen_label_rtx ();
1791 /* Test the result; if it is NaN, set errno=EDOM because
1792 the argument was not in the domain. */
1793 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1794 0, lab);
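/* The branch to LAB is taken when TARGET compares equal to itself,
   i.e. when the result is not a NaN, so the errno store below is
   skipped for in-domain results.  */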
1796 #ifdef TARGET_EDOM
1797 /* If this built-in doesn't throw an exception, set errno directly. */
1798 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1800 #ifdef GEN_ERRNO_RTX
1801 rtx errno_rtx = GEN_ERRNO_RTX;
1802 #else
1803 rtx errno_rtx
1804 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1805 #endif
1806 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1807 emit_label (lab);
1808 return;
1810 #endif
1812 /* Make sure the library call isn't expanded as a tail call. */
1813 CALL_EXPR_TAILCALL (exp) = 0;
1815 /* We can't set errno=EDOM directly; let the library call do it.
1816 Pop the arguments right away in case the call gets deleted. */
1817 NO_DEFER_POP;
1818 expand_call (exp, target, 0);
1819 OK_DEFER_POP;
1820 emit_label (lab);
1823 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1824 Return NULL_RTX if a normal call should be emitted rather than expanding
1825 the function in-line. EXP is the expression that is a call to the builtin
1826 function; if convenient, the result should be placed in TARGET.
1827 SUBTARGET may be used as the target for computing one of EXP's operands. */
1829 static rtx
1830 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1832 optab builtin_optab;
1833 rtx op0, insns, before_call;
1834 tree fndecl = get_callee_fndecl (exp);
1835 enum machine_mode mode;
1836 bool errno_set = false;
1837 tree arg;
1839 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1840 return NULL_RTX;
1842 arg = CALL_EXPR_ARG (exp, 0);
1844 switch (DECL_FUNCTION_CODE (fndecl))
1846 CASE_FLT_FN (BUILT_IN_SQRT):
1847 errno_set = ! tree_expr_nonnegative_p (arg);
1848 builtin_optab = sqrt_optab;
1849 break;
1850 CASE_FLT_FN (BUILT_IN_EXP):
1851 errno_set = true; builtin_optab = exp_optab; break;
1852 CASE_FLT_FN (BUILT_IN_EXP10):
1853 CASE_FLT_FN (BUILT_IN_POW10):
1854 errno_set = true; builtin_optab = exp10_optab; break;
1855 CASE_FLT_FN (BUILT_IN_EXP2):
1856 errno_set = true; builtin_optab = exp2_optab; break;
1857 CASE_FLT_FN (BUILT_IN_EXPM1):
1858 errno_set = true; builtin_optab = expm1_optab; break;
1859 CASE_FLT_FN (BUILT_IN_LOGB):
1860 errno_set = true; builtin_optab = logb_optab; break;
1861 CASE_FLT_FN (BUILT_IN_LOG):
1862 errno_set = true; builtin_optab = log_optab; break;
1863 CASE_FLT_FN (BUILT_IN_LOG10):
1864 errno_set = true; builtin_optab = log10_optab; break;
1865 CASE_FLT_FN (BUILT_IN_LOG2):
1866 errno_set = true; builtin_optab = log2_optab; break;
1867 CASE_FLT_FN (BUILT_IN_LOG1P):
1868 errno_set = true; builtin_optab = log1p_optab; break;
1869 CASE_FLT_FN (BUILT_IN_ASIN):
1870 builtin_optab = asin_optab; break;
1871 CASE_FLT_FN (BUILT_IN_ACOS):
1872 builtin_optab = acos_optab; break;
1873 CASE_FLT_FN (BUILT_IN_TAN):
1874 builtin_optab = tan_optab; break;
1875 CASE_FLT_FN (BUILT_IN_ATAN):
1876 builtin_optab = atan_optab; break;
1877 CASE_FLT_FN (BUILT_IN_FLOOR):
1878 builtin_optab = floor_optab; break;
1879 CASE_FLT_FN (BUILT_IN_CEIL):
1880 builtin_optab = ceil_optab; break;
1881 CASE_FLT_FN (BUILT_IN_TRUNC):
1882 builtin_optab = btrunc_optab; break;
1883 CASE_FLT_FN (BUILT_IN_ROUND):
1884 builtin_optab = round_optab; break;
1885 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1886 builtin_optab = nearbyint_optab;
1887 if (flag_trapping_math)
1888 break;
1889 /* Else fallthrough and expand as rint. */
1890 CASE_FLT_FN (BUILT_IN_RINT):
1891 builtin_optab = rint_optab; break;
1892 default:
1893 gcc_unreachable ();
1896 /* Make a suitable register to place result in. */
1897 mode = TYPE_MODE (TREE_TYPE (exp));
1899 if (! flag_errno_math || ! HONOR_NANS (mode))
1900 errno_set = false;
1902 /* Before working hard, check whether the instruction is available. */
1903 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1905 target = gen_reg_rtx (mode);
1907 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1908 need to expand the argument again. This way, we will not perform
1909 side-effects more than once. */
1910 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1912 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1914 start_sequence ();
1916 /* Compute into TARGET.
1917 Set TARGET to wherever the result comes back. */
1918 target = expand_unop (mode, builtin_optab, op0, target, 0);
1920 if (target != 0)
1922 if (errno_set)
1923 expand_errno_check (exp, target);
1925 /* Output the entire sequence. */
1926 insns = get_insns ();
1927 end_sequence ();
1928 emit_insn (insns);
1929 return target;
1932 /* If we were unable to expand via the builtin, stop the sequence
1933 (without outputting the insns) and call to the library function
1934 with the stabilized argument list. */
1935 end_sequence ();
1938 before_call = get_last_insn ();
1940 target = expand_call (exp, target, target == const0_rtx);
1942 /* If this is a sqrt operation and we don't care about errno, try to
1943 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1944 This allows the semantics of the libcall to be visible to the RTL
1945 optimizers. */
1946 if (builtin_optab == sqrt_optab && !errno_set)
1948 /* Search backwards through the insns emitted by expand_call looking
1949 for the instruction with the REG_RETVAL note. */
1950 rtx last = get_last_insn ();
1951 while (last != before_call)
1953 if (find_reg_note (last, REG_RETVAL, NULL))
1955 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1956 /* Check that the REG_EQUAL note is an EXPR_LIST with
1957 two elements, i.e. symbol_ref(sqrt) and the operand. */
1958 if (note
1959 && GET_CODE (note) == EXPR_LIST
1960 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1961 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1962 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1964 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1965 /* Check operand is a register with expected mode. */
1966 if (operand
1967 && REG_P (operand)
1968 && GET_MODE (operand) == mode)
1970 /* Replace the REG_EQUAL note with a SQRT rtx. */
1971 rtx equiv = gen_rtx_SQRT (mode, operand);
1972 set_unique_reg_note (last, REG_EQUAL, equiv);
1975 break;
1977 last = PREV_INSN (last);
1981 return target;
1984 /* Expand a call to the builtin binary math functions (pow and atan2).
1985 Return NULL_RTX if a normal call should be emitted rather than expanding the
1986 function in-line. EXP is the expression that is a call to the builtin
1987 function; if convenient, the result should be placed in TARGET.
1988 SUBTARGET may be used as the target for computing one of EXP's
1989 operands. */
1991 static rtx
1992 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1994 optab builtin_optab;
1995 rtx op0, op1, insns;
1996 int op1_type = REAL_TYPE;
1997 tree fndecl = get_callee_fndecl (exp);
1998 tree arg0, arg1;
1999 enum machine_mode mode;
2000 bool errno_set = true;
2002 switch (DECL_FUNCTION_CODE (fndecl))
2004 CASE_FLT_FN (BUILT_IN_SCALBN):
2005 CASE_FLT_FN (BUILT_IN_SCALBLN):
2006 CASE_FLT_FN (BUILT_IN_LDEXP):
2007 op1_type = INTEGER_TYPE;
2008 default:
2009 break;
2012 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2013 return NULL_RTX;
2015 arg0 = CALL_EXPR_ARG (exp, 0);
2016 arg1 = CALL_EXPR_ARG (exp, 1);
2018 switch (DECL_FUNCTION_CODE (fndecl))
2020 CASE_FLT_FN (BUILT_IN_POW):
2021 builtin_optab = pow_optab; break;
2022 CASE_FLT_FN (BUILT_IN_ATAN2):
2023 builtin_optab = atan2_optab; break;
2024 CASE_FLT_FN (BUILT_IN_SCALB):
2025 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2026 return 0;
2027 builtin_optab = scalb_optab; break;
2028 CASE_FLT_FN (BUILT_IN_SCALBN):
2029 CASE_FLT_FN (BUILT_IN_SCALBLN):
2030 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2031 return 0;
2032 /* Fall through... */
2033 CASE_FLT_FN (BUILT_IN_LDEXP):
2034 builtin_optab = ldexp_optab; break;
2035 CASE_FLT_FN (BUILT_IN_FMOD):
2036 builtin_optab = fmod_optab; break;
2037 CASE_FLT_FN (BUILT_IN_REMAINDER):
2038 CASE_FLT_FN (BUILT_IN_DREM):
2039 builtin_optab = remainder_optab; break;
2040 default:
2041 gcc_unreachable ();
2044 /* Make a suitable register to place result in. */
2045 mode = TYPE_MODE (TREE_TYPE (exp));
2047 /* Before working hard, check whether the instruction is available. */
2048 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2049 return NULL_RTX;
2051 target = gen_reg_rtx (mode);
2053 if (! flag_errno_math || ! HONOR_NANS (mode))
2054 errno_set = false;
2056 /* Always stabilize the argument list. */
2057 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2058 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2060 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2061 op1 = expand_normal (arg1);
2063 start_sequence ();
2065 /* Compute into TARGET.
2066 Set TARGET to wherever the result comes back. */
2067 target = expand_binop (mode, builtin_optab, op0, op1,
2068 target, 0, OPTAB_DIRECT);
2070 /* If we were unable to expand via the builtin, stop the sequence
2071 (without outputting the insns) and call to the library function
2072 with the stabilized argument list. */
2073 if (target == 0)
2075 end_sequence ();
2076 return expand_call (exp, target, target == const0_rtx);
2079 if (errno_set)
2080 expand_errno_check (exp, target);
2082 /* Output the entire sequence. */
2083 insns = get_insns ();
2084 end_sequence ();
2085 emit_insn (insns);
2087 return target;
2090 /* Expand a call to the builtin sin and cos math functions.
2091 Return NULL_RTX if a normal call should be emitted rather than expanding the
2092 function in-line. EXP is the expression that is a call to the builtin
2093 function; if convenient, the result should be placed in TARGET.
2094 SUBTARGET may be used as the target for computing one of EXP's
2095 operands. */
2097 static rtx
2098 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2100 optab builtin_optab;
2101 rtx op0, insns;
2102 tree fndecl = get_callee_fndecl (exp);
2103 enum machine_mode mode;
2104 tree arg;
2106 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2107 return NULL_RTX;
2109 arg = CALL_EXPR_ARG (exp, 0);
2111 switch (DECL_FUNCTION_CODE (fndecl))
2113 CASE_FLT_FN (BUILT_IN_SIN):
2114 CASE_FLT_FN (BUILT_IN_COS):
2115 builtin_optab = sincos_optab; break;
2116 default:
2117 gcc_unreachable ();
2120 /* Make a suitable register to place result in. */
2121 mode = TYPE_MODE (TREE_TYPE (exp));
2123 /* Check if the sincos insn is available; otherwise fall back
2124 to the sin or cos insn. */
2125 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2126 switch (DECL_FUNCTION_CODE (fndecl))
2128 CASE_FLT_FN (BUILT_IN_SIN):
2129 builtin_optab = sin_optab; break;
2130 CASE_FLT_FN (BUILT_IN_COS):
2131 builtin_optab = cos_optab; break;
2132 default:
2133 gcc_unreachable ();
2136 /* Before working hard, check whether the instruction is available. */
2137 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2139 target = gen_reg_rtx (mode);
2141 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2142 need to expand the argument again. This way, we will not perform
2143 side-effects more than once. */
2144 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2146 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2148 start_sequence ();
2150 /* Compute into TARGET.
2151 Set TARGET to wherever the result comes back. */
2152 if (builtin_optab == sincos_optab)
2154 int result;
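/* The sincos pattern computes both the sine and the cosine at once;
   request only the value we need by passing TARGET in the matching
   output position and 0 for the other.  */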
2156 switch (DECL_FUNCTION_CODE (fndecl))
2158 CASE_FLT_FN (BUILT_IN_SIN):
2159 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2160 break;
2161 CASE_FLT_FN (BUILT_IN_COS):
2162 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2163 break;
2164 default:
2165 gcc_unreachable ();
2167 gcc_assert (result);
2169 else
2171 target = expand_unop (mode, builtin_optab, op0, target, 0);
2174 if (target != 0)
2176 /* Output the entire sequence. */
2177 insns = get_insns ();
2178 end_sequence ();
2179 emit_insn (insns);
2180 return target;
2183 /* If we were unable to expand via the builtin, stop the sequence
2184 (without outputting the insns) and call to the library function
2185 with the stabilized argument list. */
2186 end_sequence ();
2189 target = expand_call (exp, target, target == const0_rtx);
2191 return target;
2194 /* Expand a call to one of the builtin math functions that operate on
2195 a floating point argument and output an integer result (ilogb, isinf,
2196 isnan, etc).
2197 Return 0 if a normal call should be emitted rather than expanding the
2198 function in-line. EXP is the expression that is a call to the builtin
2199 function; if convenient, the result should be placed in TARGET.
2200 SUBTARGET may be used as the target for computing one of EXP's operands. */
2202 static rtx
2203 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2205 optab builtin_optab = 0;
2206 enum insn_code icode = CODE_FOR_nothing;
2207 rtx op0;
2208 tree fndecl = get_callee_fndecl (exp);
2209 enum machine_mode mode;
2210 bool errno_set = false;
2211 tree arg;
2213 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2214 return NULL_RTX;
2216 arg = CALL_EXPR_ARG (exp, 0);
2218 switch (DECL_FUNCTION_CODE (fndecl))
2220 CASE_FLT_FN (BUILT_IN_ILOGB):
2221 errno_set = true; builtin_optab = ilogb_optab; break;
2222 CASE_FLT_FN (BUILT_IN_ISINF):
2223 builtin_optab = isinf_optab; break;
2224 case BUILT_IN_ISNORMAL:
2225 case BUILT_IN_ISFINITE:
2226 CASE_FLT_FN (BUILT_IN_FINITE):
2227 /* These builtins have no optabs (yet). */
2228 break;
2229 default:
2230 gcc_unreachable ();
2233 /* There's no easy way to detect the case we need to set EDOM. */
2234 if (flag_errno_math && errno_set)
2235 return NULL_RTX;
2237 /* Optab mode depends on the mode of the input argument. */
2238 mode = TYPE_MODE (TREE_TYPE (arg));
2240 if (builtin_optab)
2241 icode = optab_handler (builtin_optab, mode)->insn_code;
2243 /* Before working hard, check whether the instruction is available. */
2244 if (icode != CODE_FOR_nothing)
2246 /* Make a suitable register to place result in. */
2247 if (!target
2248 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2249 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2251 gcc_assert (insn_data[icode].operand[0].predicate
2252 (target, GET_MODE (target)));
2254 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2255 need to expand the argument again. This way, we will not perform
2256 side-effects more than once. */
2257 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2259 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2261 if (mode != GET_MODE (op0))
2262 op0 = convert_to_mode (mode, op0, 0);
2264 /* Compute into TARGET.
2265 Set TARGET to wherever the result comes back. */
2266 emit_unop_insn (icode, target, op0, UNKNOWN);
2267 return target;
2270 /* If there is no optab, try generic code. */
2271 switch (DECL_FUNCTION_CODE (fndecl))
2273 tree result;
2275 CASE_FLT_FN (BUILT_IN_ISINF):
2277 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2278 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2279 tree const type = TREE_TYPE (arg);
2280 REAL_VALUE_TYPE r;
2281 char buf[128];
2283 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2284 real_from_string (&r, buf);
2285 result = build_call_expr (isgr_fn, 2,
2286 fold_build1 (ABS_EXPR, type, arg),
2287 build_real (type, r));
2288 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2290 CASE_FLT_FN (BUILT_IN_FINITE):
2291 case BUILT_IN_ISFINITE:
2293 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2294 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2295 tree const type = TREE_TYPE (arg);
2296 REAL_VALUE_TYPE r;
2297 char buf[128];
2299 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2300 real_from_string (&r, buf);
2301 result = build_call_expr (isle_fn, 2,
2302 fold_build1 (ABS_EXPR, type, arg),
2303 build_real (type, r));
2304 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2306 case BUILT_IN_ISNORMAL:
2308 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2309 islessequal(fabs(x),DBL_MAX). */
2310 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2311 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2312 tree const type = TREE_TYPE (arg);
2313 REAL_VALUE_TYPE rmax, rmin;
2314 char buf[128];
2316 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2317 real_from_string (&rmax, buf);
2318 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2319 real_from_string (&rmin, buf);
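/* "0x1p<emin - 1>" parses as the smallest normalized value of MODE
   (DBL_MIN when MODE is double), giving the lower bound tested below.  */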
2320 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2321 result = build_call_expr (isle_fn, 2, arg,
2322 build_real (type, rmax));
2323 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2324 build_call_expr (isge_fn, 2, arg,
2325 build_real (type, rmin)));
2326 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2328 default:
2329 break;
2332 target = expand_call (exp, target, target == const0_rtx);
2334 return target;
2337 /* Expand a call to the builtin sincos math function.
2338 Return NULL_RTX if a normal call should be emitted rather than expanding the
2339 function in-line. EXP is the expression that is a call to the builtin
2340 function. */
2342 static rtx
2343 expand_builtin_sincos (tree exp)
2345 rtx op0, op1, op2, target1, target2;
2346 enum machine_mode mode;
2347 tree arg, sinp, cosp;
2348 int result;
2350 if (!validate_arglist (exp, REAL_TYPE,
2351 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2352 return NULL_RTX;
2354 arg = CALL_EXPR_ARG (exp, 0);
2355 sinp = CALL_EXPR_ARG (exp, 1);
2356 cosp = CALL_EXPR_ARG (exp, 2);
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (arg));
2361 /* Check if the sincos insn is available; otherwise emit the call. */
2362 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2363 return NULL_RTX;
2365 target1 = gen_reg_rtx (mode);
2366 target2 = gen_reg_rtx (mode);
2368 op0 = expand_normal (arg);
2369 op1 = expand_normal (build_fold_indirect_ref (sinp));
2370 op2 = expand_normal (build_fold_indirect_ref (cosp));
2372 /* Compute into target1 and target2.
2373 Set TARGET to wherever the result comes back. */
2374 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2375 gcc_assert (result);
2377 /* Move target1 and target2 to the memory locations indicated
2378 by op1 and op2. */
2379 emit_move_insn (op1, target1);
2380 emit_move_insn (op2, target2);
2382 return const0_rtx;
2385 /* Expand a call to the internal cexpi builtin to the sincos math function.
2386 EXP is the expression that is a call to the builtin function; if convenient,
2387 the result should be placed in TARGET. SUBTARGET may be used as the target
2388 for computing one of EXP's operands. */
2390 static rtx
2391 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2393 tree fndecl = get_callee_fndecl (exp);
2394 tree arg, type;
2395 enum machine_mode mode;
2396 rtx op0, op1, op2;
2398 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2399 return NULL_RTX;
2401 arg = CALL_EXPR_ARG (exp, 0);
2402 type = TREE_TYPE (arg);
2403 mode = TYPE_MODE (TREE_TYPE (arg));
2405 /* Try expanding via a sincos optab, fall back to emitting a libcall
2406 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2407 is only generated from a sincos or cexp call, or if either is available. */
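/* Note that cexpi (x) is exp (i*x), i.e. cos (x) + i*sin (x), so a
   single sincos computation yields both the real and imaginary parts.  */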
2408 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2410 op1 = gen_reg_rtx (mode);
2411 op2 = gen_reg_rtx (mode);
2413 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2415 /* Compute into op1 and op2. */
2416 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2418 else if (TARGET_HAS_SINCOS)
2420 tree call, fn = NULL_TREE;
2421 tree top1, top2;
2422 rtx op1a, op2a;
2424 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2425 fn = built_in_decls[BUILT_IN_SINCOSF];
2426 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2427 fn = built_in_decls[BUILT_IN_SINCOS];
2428 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2429 fn = built_in_decls[BUILT_IN_SINCOSL];
2430 else
2431 gcc_unreachable ();
2433 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2434 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2435 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2436 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2437 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2438 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2440 /* Make sure not to fold the sincos call again. */
2441 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2442 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2443 call, 3, arg, top1, top2));
2445 else
2447 tree call, fn = NULL_TREE, narg;
2448 tree ctype = build_complex_type (type);
2450 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2451 fn = built_in_decls[BUILT_IN_CEXPF];
2452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2453 fn = built_in_decls[BUILT_IN_CEXP];
2454 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2455 fn = built_in_decls[BUILT_IN_CEXPL];
2456 else
2457 gcc_unreachable ();
2459 /* If we don't have a decl for cexp, create one. This is the
2460 friendliest fallback if the user calls __builtin_cexpi
2461 on a target without full C99 function support. */
2462 if (fn == NULL_TREE)
2464 tree fntype;
2465 const char *name = NULL;
2467 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2468 name = "cexpf";
2469 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2470 name = "cexp";
2471 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2472 name = "cexpl";
2474 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2475 fn = build_fn_decl (name, fntype);
2478 narg = fold_build2 (COMPLEX_EXPR, ctype,
2479 build_real (type, dconst0), arg);
2481 /* Make sure not to fold the cexp call again. */
2482 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2483 return expand_expr (build_call_nary (ctype, call, 1, narg),
2484 target, VOIDmode, EXPAND_NORMAL);
2487 /* Now build the proper return type. */
2488 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2489 make_tree (TREE_TYPE (arg), op2),
2490 make_tree (TREE_TYPE (arg), op1)),
2491 target, VOIDmode, EXPAND_NORMAL);
2494 /* Expand a call to one of the builtin rounding functions gcc defines
2495 as an extension (lfloor and lceil). As these are gcc extensions we
2496 do not need to worry about setting errno to EDOM.
2497 If expanding via optab fails, lower expression to (int)(floor(x)).
2498 EXP is the expression that is a call to the builtin function;
2499 if convenient, the result should be placed in TARGET. SUBTARGET may
2500 be used as the target for computing one of EXP's operands. */
2502 static rtx
2503 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2505 convert_optab builtin_optab;
2506 rtx op0, insns, tmp;
2507 tree fndecl = get_callee_fndecl (exp);
2508 enum built_in_function fallback_fn;
2509 tree fallback_fndecl;
2510 enum machine_mode mode;
2511 tree arg;
2513 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2514 gcc_unreachable ();
2516 arg = CALL_EXPR_ARG (exp, 0);
2518 switch (DECL_FUNCTION_CODE (fndecl))
2520 CASE_FLT_FN (BUILT_IN_LCEIL):
2521 CASE_FLT_FN (BUILT_IN_LLCEIL):
2522 builtin_optab = lceil_optab;
2523 fallback_fn = BUILT_IN_CEIL;
2524 break;
2526 CASE_FLT_FN (BUILT_IN_LFLOOR):
2527 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2528 builtin_optab = lfloor_optab;
2529 fallback_fn = BUILT_IN_FLOOR;
2530 break;
2532 default:
2533 gcc_unreachable ();
2536 /* Make a suitable register to place result in. */
2537 mode = TYPE_MODE (TREE_TYPE (exp));
2539 target = gen_reg_rtx (mode);
2541 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2542 need to expand the argument again. This way, we will not perform
2543 side-effects more than once. */
2544 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2546 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2548 start_sequence ();
2550 /* Compute into TARGET. */
2551 if (expand_sfix_optab (target, op0, builtin_optab))
2553 /* Output the entire sequence. */
2554 insns = get_insns ();
2555 end_sequence ();
2556 emit_insn (insns);
2557 return target;
2560 /* If we were unable to expand via the builtin, stop the sequence
2561 (without outputting the insns). */
2562 end_sequence ();
2564 /* Fall back to floating point rounding optab. */
2565 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2567 /* For non-C99 targets we may end up without a fallback fndecl here
2568 if the user called __builtin_lfloor directly. In this case emit
2569 a call to the floor/ceil variants nevertheless. This should result
2570 in the best user experience on targets without full C99 support. */
2571 if (fallback_fndecl == NULL_TREE)
2573 tree fntype;
2574 const char *name = NULL;
2576 switch (DECL_FUNCTION_CODE (fndecl))
2578 case BUILT_IN_LCEIL:
2579 case BUILT_IN_LLCEIL:
2580 name = "ceil";
2581 break;
2582 case BUILT_IN_LCEILF:
2583 case BUILT_IN_LLCEILF:
2584 name = "ceilf";
2585 break;
2586 case BUILT_IN_LCEILL:
2587 case BUILT_IN_LLCEILL:
2588 name = "ceill";
2589 break;
2590 case BUILT_IN_LFLOOR:
2591 case BUILT_IN_LLFLOOR:
2592 name = "floor";
2593 break;
2594 case BUILT_IN_LFLOORF:
2595 case BUILT_IN_LLFLOORF:
2596 name = "floorf";
2597 break;
2598 case BUILT_IN_LFLOORL:
2599 case BUILT_IN_LLFLOORL:
2600 name = "floorl";
2601 break;
2602 default:
2603 gcc_unreachable ();
2606 fntype = build_function_type_list (TREE_TYPE (arg),
2607 TREE_TYPE (arg), NULL_TREE);
2608 fallback_fndecl = build_fn_decl (name, fntype);
2611 exp = build_call_expr (fallback_fndecl, 1, arg);
2613 tmp = expand_normal (exp);
2615 /* Truncate the result of floating point optab to integer
2616 via expand_fix (). */
2617 target = gen_reg_rtx (mode);
2618 expand_fix (target, tmp, 0);
2620 return target;
2623 /* Expand a call to one of the builtin math functions doing integer
2624 conversion (lrint).
2625 Return 0 if a normal call should be emitted rather than expanding the
2626 function in-line. EXP is the expression that is a call to the builtin
2627 function; if convenient, the result should be placed in TARGET.
2628 SUBTARGET may be used as the target for computing one of EXP's operands. */
2630 static rtx
2631 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2633 convert_optab builtin_optab;
2634 rtx op0, insns;
2635 tree fndecl = get_callee_fndecl (exp);
2636 tree arg;
2637 enum machine_mode mode;
2639 /* There's no easy way to detect the case we need to set EDOM. */
2640 if (flag_errno_math)
2641 return NULL_RTX;
2643 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2644 gcc_unreachable ();
2646 arg = CALL_EXPR_ARG (exp, 0);
2648 switch (DECL_FUNCTION_CODE (fndecl))
2650 CASE_FLT_FN (BUILT_IN_LRINT):
2651 CASE_FLT_FN (BUILT_IN_LLRINT):
2652 builtin_optab = lrint_optab; break;
2653 CASE_FLT_FN (BUILT_IN_LROUND):
2654 CASE_FLT_FN (BUILT_IN_LLROUND):
2655 builtin_optab = lround_optab; break;
2656 default:
2657 gcc_unreachable ();
2660 /* Make a suitable register to place result in. */
2661 mode = TYPE_MODE (TREE_TYPE (exp));
2663 target = gen_reg_rtx (mode);
2665 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2666 need to expand the argument again. This way, we will not perform
2667 side-effects more than once. */
2668 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2670 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2672 start_sequence ();
2674 if (expand_sfix_optab (target, op0, builtin_optab))
2676 /* Output the entire sequence. */
2677 insns = get_insns ();
2678 end_sequence ();
2679 emit_insn (insns);
2680 return target;
2683 /* If we were unable to expand via the builtin, stop the sequence
2684 (without outputting the insns) and call to the library function
2685 with the stabilized argument list. */
2686 end_sequence ();
2688 target = expand_call (exp, target, target == const0_rtx);
2690 return target;
2693 /* To evaluate powi(x,n), the floating point value x raised to the
2694 constant integer exponent n, we use a hybrid algorithm that
2695 combines the "window method" with look-up tables. For an
2696 introduction to exponentiation algorithms and "addition chains",
2697 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2698 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2699 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2700 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2702 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2703 multiplications to inline before calling the system library's pow
2704 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2705 so this default never requires calling pow, powf or powl. */
2707 #ifndef POWI_MAX_MULTS
2708 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2709 #endif
2711 /* The size of the "optimal power tree" lookup table. All
2712 exponents less than this value are simply looked up in the
2713 powi_table below. This threshold is also used to size the
2714 cache of pseudo registers that hold intermediate results. */
2715 #define POWI_TABLE_SIZE 256
2717 /* The size, in bits of the window, used in the "window method"
2718 exponentiation algorithm. This is equivalent to a radix of
2719 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2720 #define POWI_WINDOW_SIZE 3
2722 /* The following table is an efficient representation of an
2723 "optimal power tree". For each value, i, the corresponding
2724 value, j, in the table states that an optimal evaluation
2725 sequence for calculating pow(x,i) can be found by evaluating
2726 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2727 100 integers is given in Knuth's "Seminumerical algorithms". */
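/* A worked example: powi_table[15] is 9, so x**15 is computed as
   x**9 * x**6; recursing, x**9 = x**6 * x**3, x**6 = x**3 * x**3,
   x**3 = x**2 * x and x**2 = x * x, five multiplications in all
   (intermediate powers are cached by expand_powi_1 below).  */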
2729 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2731 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2732 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2733 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2734 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2735 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2736 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2737 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2738 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2739 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2740 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2741 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2742 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2743 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2744 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2745 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2746 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2747 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2748 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2749 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2750 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2751 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2752 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2753 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2754 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2755 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2756 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2757 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2758 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2759 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2760 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2761 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2762 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2766 /* Return the number of multiplications required to calculate
2767 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2768 subroutine of powi_cost. CACHE is an array indicating
2769 which exponents have already been calculated. */
2771 static int
2772 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2774 /* If we've already calculated this exponent, then this evaluation
2775 doesn't require any additional multiplications. */
2776 if (cache[n])
2777 return 0;
2779 cache[n] = true;
2780 return powi_lookup_cost (n - powi_table[n], cache)
2781 + powi_lookup_cost (powi_table[n], cache) + 1;
2784 /* Return the number of multiplications required to calculate
2785 powi(x,n) for an arbitrary x, given the exponent N. This
2786 function needs to be kept in sync with expand_powi below. */
2788 static int
2789 powi_cost (HOST_WIDE_INT n)
2791 bool cache[POWI_TABLE_SIZE];
2792 unsigned HOST_WIDE_INT digit;
2793 unsigned HOST_WIDE_INT val;
2794 int result;
2796 if (n == 0)
2797 return 0;
2799 /* Ignore the reciprocal when calculating the cost. */
2800 val = (n < 0) ? -n : n;
2802 /* Initialize the exponent cache. */
2803 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2804 cache[1] = true;
2806 result = 0;
2808 while (val >= POWI_TABLE_SIZE)
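/* VAL is still too large for the table: if it is odd, peel off the
   low POWI_WINDOW_SIZE bits as one table lookup plus the multiply and
   the squarings implied by the shift; if it is even, a single
   squaring halves the exponent.  */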
2810 if (val & 1)
2812 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2813 result += powi_lookup_cost (digit, cache)
2814 + POWI_WINDOW_SIZE + 1;
2815 val >>= POWI_WINDOW_SIZE;
2817 else
2819 val >>= 1;
2820 result++;
2824 return result + powi_lookup_cost (val, cache);
2827 /* Recursive subroutine of expand_powi. This function takes the array,
2828 CACHE, of already calculated exponents and an exponent N and returns
2829 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2831 static rtx
2832 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2834 unsigned HOST_WIDE_INT digit;
2835 rtx target, result;
2836 rtx op0, op1;
2838 if (n < POWI_TABLE_SIZE)
2840 if (cache[n])
2841 return cache[n];
2843 target = gen_reg_rtx (mode);
2844 cache[n] = target;
2846 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2847 op1 = expand_powi_1 (mode, powi_table[n], cache);
2849 else if (n & 1)
2851 target = gen_reg_rtx (mode);
2852 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2853 op0 = expand_powi_1 (mode, n - digit, cache);
2854 op1 = expand_powi_1 (mode, digit, cache);
2856 else
2858 target = gen_reg_rtx (mode);
2859 op0 = expand_powi_1 (mode, n >> 1, cache);
2860 op1 = op0;
2863 result = expand_mult (mode, op0, op1, target, 0);
2864 if (result != target)
2865 emit_move_insn (target, result);
2866 return target;
2869 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2870 floating point operand in mode MODE, and N is the exponent. This
2871 function needs to be kept in sync with powi_cost above. */
2873 static rtx
2874 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2876 unsigned HOST_WIDE_INT val;
2877 rtx cache[POWI_TABLE_SIZE];
2878 rtx result;
2880 if (n == 0)
2881 return CONST1_RTX (mode);
2883 val = (n < 0) ? -n : n;
2885 memset (cache, 0, sizeof (cache));
2886 cache[1] = x;
2888 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2890 /* If the original exponent was negative, reciprocate the result. */
2891 if (n < 0)
2892 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2893 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2895 return result;
2898 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2899 a normal call should be emitted rather than expanding the function
2900 in-line. EXP is the expression that is a call to the builtin
2901 function; if convenient, the result should be placed in TARGET. */
2903 static rtx
2904 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2906 tree arg0, arg1;
2907 tree fn, narg0;
2908 tree type = TREE_TYPE (exp);
2909 REAL_VALUE_TYPE cint, c, c2;
2910 HOST_WIDE_INT n;
2911 rtx op, op2;
2912 enum machine_mode mode = TYPE_MODE (type);
2914 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2915 return NULL_RTX;
2917 arg0 = CALL_EXPR_ARG (exp, 0);
2918 arg1 = CALL_EXPR_ARG (exp, 1);
2920 if (TREE_CODE (arg1) != REAL_CST
2921 || TREE_OVERFLOW (arg1))
2922 return expand_builtin_mathfn_2 (exp, target, subtarget);
2924 /* Handle constant exponents. */
2926 /* For integer valued exponents we can expand to an optimal multiplication
2927 sequence using expand_powi. */
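/* For instance, pow (x, 2.0) becomes x * x; larger integral exponents
   are expanded this way only with -funsafe-math-optimizations and when
   powi_cost stays within POWI_MAX_MULTS, as checked below.  */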
2928 c = TREE_REAL_CST (arg1);
2929 n = real_to_integer (&c);
2930 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2931 if (real_identical (&c, &cint)
2932 && ((n >= -1 && n <= 2)
2933 || (flag_unsafe_math_optimizations
2934 && !optimize_size
2935 && powi_cost (n) <= POWI_MAX_MULTS)))
2937 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2938 if (n != 1)
2940 op = force_reg (mode, op);
2941 op = expand_powi (op, mode, n);
2943 return op;
2946 narg0 = builtin_save_expr (arg0);
2948 /* If the exponent is not integer valued, check if it is half of an integer.
2949 In this case we can expand to sqrt (x) * x**(n/2). */
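/* For example, pow (x, 0.5) becomes sqrt (x), and with
   -funsafe-math-optimizations pow (x, 2.5) becomes sqrt (x) * x * x.  */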
2950 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2951 if (fn != NULL_TREE)
2953 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2954 n = real_to_integer (&c2);
2955 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2956 if (real_identical (&c2, &cint)
2957 && ((flag_unsafe_math_optimizations
2958 && !optimize_size
2959 && powi_cost (n/2) <= POWI_MAX_MULTS)
2960 || n == 1))
2962 tree call_expr = build_call_expr (fn, 1, narg0);
2963 /* Use expand_expr in case the newly built call expression
2964 was folded to a non-call. */
2965 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2966 if (n != 1)
2968 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2969 op2 = force_reg (mode, op2);
2970 op2 = expand_powi (op2, mode, abs (n / 2));
2971 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2972 0, OPTAB_LIB_WIDEN);
2973 /* If the original exponent was negative, reciprocate the
2974 result. */
2975 if (n < 0)
2976 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2977 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2979 return op;
2983 /* Check whether the exponent is a third of an integer. In this case
2984 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2985 different from pow (x, 1./3.) due to rounding and behavior
2986 with negative x we need to constrain this transformation to
2987 unsafe math and positive x or finite math. */
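/* Under those constraints, e.g. pow (x, 1./3.) becomes cbrt (x) and
   pow (x, 4./3.) becomes x * cbrt (x).  */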
2988 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2989 if (fn != NULL_TREE
2990 && flag_unsafe_math_optimizations
2991 && (tree_expr_nonnegative_p (arg0)
2992 || !HONOR_NANS (mode)))
2994 REAL_VALUE_TYPE dconst3;
2995 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2996 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2997 real_round (&c2, mode, &c2);
2998 n = real_to_integer (&c2);
2999 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3000 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
3001 real_convert (&c2, mode, &c2);
3002 if (real_identical (&c2, &c)
3003 && ((!optimize_size
3004 && powi_cost (n/3) <= POWI_MAX_MULTS)
3005 || n == 1))
3007 tree call_expr = build_call_expr (fn, 1, narg0);
3008 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
3009 if (abs (n) % 3 == 2)
3010 op = expand_simple_binop (mode, MULT, op, op, op,
3011 0, OPTAB_LIB_WIDEN);
3012 if (n != 1)
3014 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
3015 op2 = force_reg (mode, op2);
3016 op2 = expand_powi (op2, mode, abs (n / 3));
3017 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
3018 0, OPTAB_LIB_WIDEN);
3019 /* If the original exponent was negative, reciprocate the
3020 result. */
3021 if (n < 0)
3022 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
3023 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
3025 return op;
3029 /* Fall back to optab expansion. */
3030 return expand_builtin_mathfn_2 (exp, target, subtarget);
3033 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3034 a normal call should be emitted rather than expanding the function
3035 in-line. EXP is the expression that is a call to the builtin
3036 function; if convenient, the result should be placed in TARGET. */
3038 static rtx
3039 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3041 tree arg0, arg1;
3042 rtx op0, op1;
3043 enum machine_mode mode;
3044 enum machine_mode mode2;
3046 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3047 return NULL_RTX;
3049 arg0 = CALL_EXPR_ARG (exp, 0);
3050 arg1 = CALL_EXPR_ARG (exp, 1);
3051 mode = TYPE_MODE (TREE_TYPE (exp));
3053 /* Handle constant power. */
3055 if (TREE_CODE (arg1) == INTEGER_CST
3056 && !TREE_OVERFLOW (arg1))
3058 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3060 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3061 Otherwise, check the number of multiplications required. */
3062 if ((TREE_INT_CST_HIGH (arg1) == 0
3063 || TREE_INT_CST_HIGH (arg1) == -1)
3064 && ((n >= -1 && n <= 2)
3065 || (! optimize_size
3066 && powi_cost (n) <= POWI_MAX_MULTS)))
3068 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3069 op0 = force_reg (mode, op0);
3070 return expand_powi (op0, mode, n);
3074 /* Emit a libcall to libgcc. */
3076 /* Mode of the 2nd argument must match that of an int. */
3077 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3079 if (target == NULL_RTX)
3080 target = gen_reg_rtx (mode);
3082 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3083 if (GET_MODE (op0) != mode)
3084 op0 = convert_to_mode (mode, op0, 0);
3085 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3086 if (GET_MODE (op1) != mode2)
3087 op1 = convert_to_mode (mode2, op1, 0);
3089 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3090 target, LCT_CONST, mode, 2,
3091 op0, mode, op1, mode2);
3093 return target;
3096 /* Expand expression EXP, which is a call to the strlen builtin. Return
3097 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3098 try to get the result in TARGET, if convenient. */
3100 static rtx
3101 expand_builtin_strlen (tree exp, rtx target,
3102 enum machine_mode target_mode)
3104 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3105 return NULL_RTX;
3106 else
3108 rtx pat;
3109 tree len;
3110 tree src = CALL_EXPR_ARG (exp, 0);
3111 rtx result, src_reg, char_rtx, before_strlen;
3112 enum machine_mode insn_mode = target_mode, char_mode;
3113 enum insn_code icode = CODE_FOR_nothing;
3114 int align;
3116 /* If the length can be computed at compile-time, return it. */
3117 len = c_strlen (src, 0);
3118 if (len)
3119 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3121 /* If the length can be computed at compile-time and is a constant
3122 integer, but there are side-effects in src, evaluate
3123 src for side-effects, then return len.
3124 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3125 can be optimized into: i++; x = 3; */
3126 len = c_strlen (src, 1);
3127 if (len && TREE_CODE (len) == INTEGER_CST)
3129 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3130 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3133 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3135 /* If SRC is not a pointer type, don't do this operation inline. */
3136 if (align == 0)
3137 return NULL_RTX;
3139 /* Bail out if we can't compute strlen in the right mode. */
3140 while (insn_mode != VOIDmode)
3142 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3143 if (icode != CODE_FOR_nothing)
3144 break;
3146 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3148 if (insn_mode == VOIDmode)
3149 return NULL_RTX;
3151 /* Make a place to write the result of the instruction. */
3152 result = target;
3153 if (! (result != 0
3154 && REG_P (result)
3155 && GET_MODE (result) == insn_mode
3156 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3157 result = gen_reg_rtx (insn_mode);
3159 /* Make a place to hold the source address. We will not expand
3160 the actual source until we are sure that the expansion will
3161 not fail -- there are trees that cannot be expanded twice. */
3162 src_reg = gen_reg_rtx (Pmode);
3164 /* Mark the beginning of the strlen sequence so we can emit the
3165 source operand later. */
3166 before_strlen = get_last_insn ();
3168 char_rtx = const0_rtx;
3169 char_mode = insn_data[(int) icode].operand[2].mode;
3170 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3171 char_mode))
3172 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3174 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3175 char_rtx, GEN_INT (align));
3176 if (! pat)
3177 return NULL_RTX;
3178 emit_insn (pat);
3180 /* Now that we are assured of success, expand the source. */
3181 start_sequence ();
3182 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3183 if (pat != src_reg)
3184 emit_move_insn (src_reg, pat);
3185 pat = get_insns ();
3186 end_sequence ();
3188 if (before_strlen)
3189 emit_insn_after (pat, before_strlen);
3190 else
3191 emit_insn_before (pat, get_insns ());
3193 /* Return the value in the proper mode for this function. */
3194 if (GET_MODE (result) == target_mode)
3195 target = result;
3196 else if (target != 0)
3197 convert_move (target, result, 0);
3198 else
3199 target = convert_to_mode (target_mode, result, 0);
3201 return target;
3205 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3206 caller should emit a normal call, otherwise try to get the result
3207 in TARGET, if convenient (and in mode MODE if that's convenient). */
3209 static rtx
3210 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3212 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3214 tree type = TREE_TYPE (exp);
3215 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3216 CALL_EXPR_ARG (exp, 1), type);
3217 if (result)
3218 return expand_expr (result, target, mode, EXPAND_NORMAL);
3220 return NULL_RTX;
3223 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed; the
3224 caller should emit a normal call. Otherwise try to get the result
3225 in TARGET, if convenient (and in mode MODE if that's convenient). */
3227 static rtx
3228 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3230 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3232 tree type = TREE_TYPE (exp);
3233 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3234 CALL_EXPR_ARG (exp, 1), type);
3235 if (result)
3236 return expand_expr (result, target, mode, EXPAND_NORMAL);
3238 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3240 return NULL_RTX;
3243 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed; the
3244 caller should emit a normal call. Otherwise try to get the result
3245 in TARGET, if convenient (and in mode MODE if that's convenient). */
3247 static rtx
3248 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3250 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3252 tree type = TREE_TYPE (exp);
3253 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3254 CALL_EXPR_ARG (exp, 1), type);
3255 if (result)
3256 return expand_expr (result, target, mode, EXPAND_NORMAL);
3258 return NULL_RTX;
3261 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed; the
3262 caller should emit a normal call. Otherwise try to get the result
3263 in TARGET, if convenient (and in mode MODE if that's convenient). */
3265 static rtx
3266 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3268 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3270 tree type = TREE_TYPE (exp);
3271 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3272 CALL_EXPR_ARG (exp, 1), type);
3273 if (result)
3274 return expand_expr (result, target, mode, EXPAND_NORMAL);
3276 return NULL_RTX;
3279 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3280 bytes from constant string DATA + OFFSET and return it as a target
3281 constant. */
3283 static rtx
3284 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3285 enum machine_mode mode)
3287 const char *str = (const char *) data;
3289 gcc_assert (offset >= 0
3290 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3291 <= strlen (str) + 1));
3293 return c_readstr (str + offset, mode);
3296 /* Expand a call EXP to the memcpy builtin.
3297 Return NULL_RTX if we failed; the caller should emit a normal call.
3298 Otherwise try to get the result in TARGET, if convenient (and in
3299 mode MODE if that's convenient). */
3301 static rtx
3302 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3304 tree fndecl = get_callee_fndecl (exp);
3306 if (!validate_arglist (exp,
3307 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3308 return NULL_RTX;
3309 else
3311 tree dest = CALL_EXPR_ARG (exp, 0);
3312 tree src = CALL_EXPR_ARG (exp, 1);
3313 tree len = CALL_EXPR_ARG (exp, 2);
3314 const char *src_str;
3315 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3316 unsigned int dest_align
3317 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3318 rtx dest_mem, src_mem, dest_addr, len_rtx;
3319 tree result = fold_builtin_memory_op (dest, src, len,
3320 TREE_TYPE (TREE_TYPE (fndecl)),
3321 false, /*endp=*/0);
3322 HOST_WIDE_INT expected_size = -1;
3323 unsigned int expected_align = 0;
3325 if (result)
3327 while (TREE_CODE (result) == COMPOUND_EXPR)
3329 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3330 EXPAND_NORMAL);
3331 result = TREE_OPERAND (result, 1);
3333 return expand_expr (result, target, mode, EXPAND_NORMAL);
3336 /* If DEST is not a pointer type, call the normal function. */
3337 if (dest_align == 0)
3338 return NULL_RTX;
3340 /* If SRC is not a pointer type, don't do this
3341 operation in-line. */
3342 if (src_align == 0)
3343 return NULL_RTX;
3345 stringop_block_profile (exp, &expected_align, &expected_size);
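/* With profile feedback, stringop_block_profile fills in the block
   alignment and size observed at run time so that emit_block_move_hints
   below can choose a better expansion strategy.  */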
3346 if (expected_align < dest_align)
3347 expected_align = dest_align;
3348 dest_mem = get_memory_rtx (dest, len);
3349 set_mem_align (dest_mem, dest_align);
3350 len_rtx = expand_normal (len);
3351 src_str = c_getstr (src);
3353 /* If SRC is a string constant and block move would be done
3354 by pieces, we can avoid loading the string from memory
3355 and only store the computed constants. */
3356 if (src_str
3357 && GET_CODE (len_rtx) == CONST_INT
3358 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3359 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3360 (void *) src_str, dest_align, false))
3362 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3363 builtin_memcpy_read_str,
3364 (void *) src_str, dest_align, false, 0);
3365 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3366 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3367 return dest_mem;
3370 src_mem = get_memory_rtx (src, len);
3371 set_mem_align (src_mem, src_align);
3373 /* Copy word part most expediently. */
3374 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3375 CALL_EXPR_TAILCALL (exp)
3376 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3377 expected_align, expected_size);
3379 if (dest_addr == 0)
3381 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3382 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3384 return dest_addr;
3388 /* Expand a call EXP to the mempcpy builtin.
3389 Return NULL_RTX if we failed; the caller should emit a normal call,
3390 otherwise try to get the result in TARGET, if convenient (and in
3391 mode MODE if that's convenient). If ENDP is 0 return the
3392 destination pointer, if ENDP is 1 return the end pointer ala
3393 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3394 stpcpy. */
3396 static rtx
3397 expand_builtin_mempcpy(tree exp, rtx target, enum machine_mode mode)
3399 if (!validate_arglist (exp,
3400 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3401 return NULL_RTX;
3402 else
3404 tree dest = CALL_EXPR_ARG (exp, 0);
3405 tree src = CALL_EXPR_ARG (exp, 1);
3406 tree len = CALL_EXPR_ARG (exp, 2);
3407 return expand_builtin_mempcpy_args (dest, src, len,
3408 TREE_TYPE (exp),
3409 target, mode, /*endp=*/ 1);
3413 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3414 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3415 so that this can also be called without constructing an actual CALL_EXPR.
3416 TYPE is the return type of the call. The other arguments and return value
3417 are the same as for expand_builtin_mempcpy. */
3419 static rtx
3420 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3421 rtx target, enum machine_mode mode, int endp)
3423 /* If return value is ignored, transform mempcpy into memcpy. */
3424 if (target == const0_rtx)
3426 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3428 if (!fn)
3429 return NULL_RTX;
3431 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3432 target, mode, EXPAND_NORMAL);
3434 else
3436 const char *src_str;
3437 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3438 unsigned int dest_align
3439 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3440 rtx dest_mem, src_mem, len_rtx;
3441 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3443 if (result)
3445 while (TREE_CODE (result) == COMPOUND_EXPR)
3447 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3448 EXPAND_NORMAL);
3449 result = TREE_OPERAND (result, 1);
3451 return expand_expr (result, target, mode, EXPAND_NORMAL);
3454 /* If either SRC or DEST is not a pointer type, don't do this
3455 operation in-line. */
3456 if (dest_align == 0 || src_align == 0)
3457 return NULL_RTX;
3459 /* If LEN is not constant, call the normal function. */
3460 if (! host_integerp (len, 1))
3461 return NULL_RTX;
3463 len_rtx = expand_normal (len);
3464 src_str = c_getstr (src);
3466 /* If SRC is a string constant and block move would be done
3467 by pieces, we can avoid loading the string from memory
3468 and only store the computed constants. */
3469 if (src_str
3470 && GET_CODE (len_rtx) == CONST_INT
3471 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3472 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3473 (void *) src_str, dest_align, false))
3475 dest_mem = get_memory_rtx (dest, len);
3476 set_mem_align (dest_mem, dest_align);
3477 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3478 builtin_memcpy_read_str,
3479 (void *) src_str, dest_align,
3480 false, endp);
3481 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3482 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3483 return dest_mem;
3486 if (GET_CODE (len_rtx) == CONST_INT
3487 && can_move_by_pieces (INTVAL (len_rtx),
3488 MIN (dest_align, src_align)))
3490 dest_mem = get_memory_rtx (dest, len);
3491 set_mem_align (dest_mem, dest_align);
3492 src_mem = get_memory_rtx (src, len);
3493 set_mem_align (src_mem, src_align);
3494 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3495 MIN (dest_align, src_align), endp);
3496 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3497 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3498 return dest_mem;
3501 return NULL_RTX;
3505 /* Expand expression EXP, which is a call to the memmove builtin. Return
3506 NULL_RTX if we failed; the caller should emit a normal call. */
3508 static rtx
3509 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3511 if (!validate_arglist (exp,
3512 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3513 return NULL_RTX;
3514 else
3516 tree dest = CALL_EXPR_ARG (exp, 0);
3517 tree src = CALL_EXPR_ARG (exp, 1);
3518 tree len = CALL_EXPR_ARG (exp, 2);
3519 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3520 target, mode, ignore);
3524 /* Helper function to do the actual work for expand_builtin_memmove. The
3525 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3526 so that this can also be called without constructing an actual CALL_EXPR.
3527 TYPE is the return type of the call. The other arguments and return value
3528 are the same as for expand_builtin_memmove. */
3530 static rtx
3531 expand_builtin_memmove_args (tree dest, tree src, tree len,
3532 tree type, rtx target, enum machine_mode mode,
3533 int ignore)
3535 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3537 if (result)
3539 STRIP_TYPE_NOPS (result);
3540 while (TREE_CODE (result) == COMPOUND_EXPR)
3542 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3543 EXPAND_NORMAL);
3544 result = TREE_OPERAND (result, 1);
3546 return expand_expr (result, target, mode, EXPAND_NORMAL);
3549 /* Otherwise, call the normal function. */
3550 return NULL_RTX;
3553 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3554 NULL_RTX if we failed; the caller should emit a normal call. */
3556 static rtx
3557 expand_builtin_bcopy (tree exp, int ignore)
3559 tree type = TREE_TYPE (exp);
3560 tree src, dest, size;
3562 if (!validate_arglist (exp,
3563 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3564 return NULL_RTX;
3566 src = CALL_EXPR_ARG (exp, 0);
3567 dest = CALL_EXPR_ARG (exp, 1);
3568 size = CALL_EXPR_ARG (exp, 2);
3570 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3571 This is done this way so that if it isn't expanded inline, we fall
3572 back to calling bcopy instead of memmove. */
3573 return expand_builtin_memmove_args (dest, src,
3574 fold_convert (sizetype, size),
3575 type, const0_rtx, VOIDmode,
3576 ignore);
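/* For example, a user-level call bcopy (from, to, n) is expanded here
   as memmove (to, from, n) would be; note the swapped order of the
   source and destination arguments between the two interfaces.  */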
3579 #ifndef HAVE_movstr
3580 # define HAVE_movstr 0
3581 # define CODE_FOR_movstr CODE_FOR_nothing
3582 #endif
3584 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3585 we failed; the caller should emit a normal call, otherwise try to
3586 get the result in TARGET, if convenient. If ENDP is 0 return the
3587 destination pointer, if ENDP is 1 return the end pointer ala
3588 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3589 stpcpy. */
3591 static rtx
3592 expand_movstr (tree dest, tree src, rtx target, int endp)
3594 rtx end;
3595 rtx dest_mem;
3596 rtx src_mem;
3597 rtx insn;
3598 const struct insn_data * data;
3600 if (!HAVE_movstr)
3601 return NULL_RTX;
3603 dest_mem = get_memory_rtx (dest, NULL);
3604 src_mem = get_memory_rtx (src, NULL);
3605 if (!endp)
3607 target = force_reg (Pmode, XEXP (dest_mem, 0));
3608 dest_mem = replace_equiv_address (dest_mem, target);
3609 end = gen_reg_rtx (Pmode);
3611 else
3613 if (target == 0 || target == const0_rtx)
3615 end = gen_reg_rtx (Pmode);
3616 if (target == 0)
3617 target = end;
3619 else
3620 end = target;
3623 data = insn_data + CODE_FOR_movstr;
3625 if (data->operand[0].mode != VOIDmode)
3626 end = gen_lowpart (data->operand[0].mode, end);
3628 insn = data->genfun (end, dest_mem, src_mem);
3630 gcc_assert (insn);
3632 emit_insn (insn);
3634 /* movstr is supposed to set end to the address of the NUL
3635 terminator. If the caller requested a mempcpy-like return value,
3636 adjust it. */
3637 if (endp == 1 && target != const0_rtx)
3639 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3640 emit_move_insn (target, force_operand (tem, NULL_RTX));
3643 return target;
3646 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3647 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3648 try to get the result in TARGET, if convenient (and in mode MODE if that's
3649 convenient). */
3651 static rtx
3652 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3654 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3656 tree dest = CALL_EXPR_ARG (exp, 0);
3657 tree src = CALL_EXPR_ARG (exp, 1);
3658 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3660 return NULL_RTX;
3663 /* Helper function to do the actual work for expand_builtin_strcpy. The
3664 arguments to the builtin_strcpy call DEST and SRC are broken out
3665 so that this can also be called without constructing an actual CALL_EXPR.
3666 The other arguments and return value are the same as for
3667 expand_builtin_strcpy. */
3669 static rtx
3670 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3671 rtx target, enum machine_mode mode)
3673 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3674 if (result)
3675 return expand_expr (result, target, mode, EXPAND_NORMAL);
3676 return expand_movstr (dest, src, target, /*endp=*/0);
3680 /* Expand a call EXP to the stpcpy builtin.
3681 Return NULL_RTX if we failed; the caller should emit a normal call,
3682 otherwise try to get the result in TARGET, if convenient (and in
3683 mode MODE if that's convenient). */
3685 static rtx
3686 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3688 tree dst, src;
3690 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3691 return NULL_RTX;
3693 dst = CALL_EXPR_ARG (exp, 0);
3694 src = CALL_EXPR_ARG (exp, 1);
3696 /* If return value is ignored, transform stpcpy into strcpy. */
3697 if (target == const0_rtx)
3699 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3700 if (!fn)
3701 return NULL_RTX;
3703 return expand_expr (build_call_expr (fn, 2, dst, src),
3704 target, mode, EXPAND_NORMAL);
3706 else
3708 tree len, lenp1;
3709 rtx ret;
3711 /* Ensure we get an actual string whose length can be evaluated at
3712 compile-time, not an expression containing a string. This is
3713 because the latter will potentially produce pessimized code
3714 when used to produce the return value. */
3715 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3716 return expand_movstr (dst, src, target, /*endp=*/2);
3718 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3719 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3720 target, mode, /*endp=*/2);
3722 if (ret)
3723 return ret;
3725 if (TREE_CODE (len) == INTEGER_CST)
3727 rtx len_rtx = expand_normal (len);
3729 if (GET_CODE (len_rtx) == CONST_INT)
3731 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3732 dst, src, target, mode);
3734 if (ret)
3736 if (! target)
3738 if (mode != VOIDmode)
3739 target = gen_reg_rtx (mode);
3740 else
3741 target = gen_reg_rtx (GET_MODE (ret));
3743 if (GET_MODE (target) != GET_MODE (ret))
3744 ret = gen_lowpart (GET_MODE (target), ret);
3746 ret = plus_constant (ret, INTVAL (len_rtx));
3747 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3748 gcc_assert (ret);
3750 return target;
3755 return expand_movstr (dst, src, target, /*endp=*/2);
3759 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3760 bits from the constant string DATA + OFFSET and return it as a target
3761 constant. */
3764 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3765 enum machine_mode mode)
3767 const char *str = (const char *) data;
3769 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3770 return const0_rtx;
3772 return c_readstr (str + offset, mode);
3775 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3776 NULL_RTX if we failed; the caller should emit a normal call. */
3778 static rtx
3779 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3781 tree fndecl = get_callee_fndecl (exp);
3783 if (validate_arglist (exp,
3784 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3786 tree dest = CALL_EXPR_ARG (exp, 0);
3787 tree src = CALL_EXPR_ARG (exp, 1);
3788 tree len = CALL_EXPR_ARG (exp, 2);
3789 tree slen = c_strlen (src, 1);
3790 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3792 if (result)
3794 while (TREE_CODE (result) == COMPOUND_EXPR)
3796 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3797 EXPAND_NORMAL);
3798 result = TREE_OPERAND (result, 1);
3800 return expand_expr (result, target, mode, EXPAND_NORMAL);
3803 /* We must be passed a constant len and src parameter. */
3804 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3805 return NULL_RTX;
3807 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3809 /* We're required to pad with trailing zeros if the requested
3810 len is greater than strlen(s2)+1. In that case try to
3811 use store_by_pieces; if that fails, punt. */
3812 if (tree_int_cst_lt (slen, len))
3814 unsigned int dest_align
3815 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3816 const char *p = c_getstr (src);
3817 rtx dest_mem;
3819 if (!p || dest_align == 0 || !host_integerp (len, 1)
3820 || !can_store_by_pieces (tree_low_cst (len, 1),
3821 builtin_strncpy_read_str,
3822 (void *) p, dest_align, false))
3823 return NULL_RTX;
3825 dest_mem = get_memory_rtx (dest, len);
3826 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3827 builtin_strncpy_read_str,
3828 (void *) p, dest_align, false, 0);
3829 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3830 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3831 return dest_mem;
3834 return NULL_RTX;
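/* A short illustration of the padding rule handled above (sketch only):

       char buf[8];
       strncpy (buf, "ab", sizeof buf);

   copies 'a', 'b' and '\0' and must then zero the remaining five bytes
   of BUF, which is why LEN > strlen (SRC) + 1 is handled through
   store_by_pieces (the read callback returns zeros past the end of the
   source string) rather than a plain block copy.  */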
3837 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3838 bits from the constant string DATA + OFFSET and return it as a target
3839 constant. */
3842 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3843 enum machine_mode mode)
3845 const char *c = (const char *) data;
3846 char *p = alloca (GET_MODE_SIZE (mode));
3848 memset (p, *c, GET_MODE_SIZE (mode));
3850 return c_readstr (p, mode);
3853 /* Callback routine for store_by_pieces. Return the RTL of a register
3854 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3855 char value given in the RTL register data. For example, if mode is
3856 4 bytes wide, return the RTL for 0x01010101*data. */
3858 static rtx
3859 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3860 enum machine_mode mode)
3862 rtx target, coeff;
3863 size_t size;
3864 char *p;
3866 size = GET_MODE_SIZE (mode);
3867 if (size == 1)
3868 return (rtx) data;
3870 p = alloca (size);
3871 memset (p, 1, size);
3872 coeff = c_readstr (p, mode);
3874 target = convert_to_mode (mode, (rtx) data, 1);
3875 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3876 return force_reg (mode, target);
3879 /* Expand expression EXP, which is a call to the memset builtin. Return
3880 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3881 try to get the result in TARGET, if convenient (and in mode MODE if that's
3882 convenient). */
3884 static rtx
3885 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3887 if (!validate_arglist (exp,
3888 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3889 return NULL_RTX;
3890 else
3892 tree dest = CALL_EXPR_ARG (exp, 0);
3893 tree val = CALL_EXPR_ARG (exp, 1);
3894 tree len = CALL_EXPR_ARG (exp, 2);
3895 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3899 /* Helper function to do the actual work for expand_builtin_memset. The
3900 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3901 so that this can also be called without constructing an actual CALL_EXPR.
3902 The other arguments and return value are the same as for
3903 expand_builtin_memset. */
3905 static rtx
3906 expand_builtin_memset_args (tree dest, tree val, tree len,
3907 rtx target, enum machine_mode mode, tree orig_exp)
3909 tree fndecl, fn;
3910 enum built_in_function fcode;
3911 char c;
3912 unsigned int dest_align;
3913 rtx dest_mem, dest_addr, len_rtx;
3914 HOST_WIDE_INT expected_size = -1;
3915 unsigned int expected_align = 0;
3917 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3919 /* If DEST is not a pointer type, don't do this operation in-line. */
3920 if (dest_align == 0)
3921 return NULL_RTX;
3923 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3924 if (expected_align < dest_align)
3925 expected_align = dest_align;
3927 /* If the LEN parameter is zero, return DEST. */
3928 if (integer_zerop (len))
3930 /* Evaluate and ignore VAL in case it has side-effects. */
3931 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3932 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3935 /* Stabilize the arguments in case we fail. */
3936 dest = builtin_save_expr (dest);
3937 val = builtin_save_expr (val);
3938 len = builtin_save_expr (len);
3940 len_rtx = expand_normal (len);
3941 dest_mem = get_memory_rtx (dest, len);
3943 if (TREE_CODE (val) != INTEGER_CST)
3945 rtx val_rtx;
3947 val_rtx = expand_normal (val);
3948 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3949 val_rtx, 0);
3951 /* Assume that we can memset by pieces if we can store
3952 the coefficients by pieces (in the required modes).
3953 We can't pass builtin_memset_gen_str as that emits RTL. */
3954 c = 1;
3955 if (host_integerp (len, 1)
3956 && can_store_by_pieces (tree_low_cst (len, 1),
3957 builtin_memset_read_str, &c, dest_align,
3958 true))
3960 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3961 val_rtx);
3962 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3963 builtin_memset_gen_str, val_rtx, dest_align,
3964 true, 0);
3966 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3967 dest_align, expected_align,
3968 expected_size))
3969 goto do_libcall;
3971 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3972 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3973 return dest_mem;
3976 if (target_char_cast (val, &c))
3977 goto do_libcall;
3979 if (c)
3981 if (host_integerp (len, 1)
3982 && can_store_by_pieces (tree_low_cst (len, 1),
3983 builtin_memset_read_str, &c, dest_align,
3984 true))
3985 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3986 builtin_memset_read_str, &c, dest_align, true, 0);
3987 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3988 dest_align, expected_align,
3989 expected_size))
3990 goto do_libcall;
3992 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3993 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3994 return dest_mem;
3997 set_mem_align (dest_mem, dest_align);
3998 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3999 CALL_EXPR_TAILCALL (orig_exp)
4000 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4001 expected_align, expected_size);
4003 if (dest_addr == 0)
4005 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4006 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4009 return dest_addr;
4011 do_libcall:
4012 fndecl = get_callee_fndecl (orig_exp);
4013 fcode = DECL_FUNCTION_CODE (fndecl);
4014 if (fcode == BUILT_IN_MEMSET)
4015 fn = build_call_expr (fndecl, 3, dest, val, len);
4016 else if (fcode == BUILT_IN_BZERO)
4017 fn = build_call_expr (fndecl, 2, dest, len);
4018 else
4019 gcc_unreachable ();
4020 if (TREE_CODE (fn) == CALL_EXPR)
4021 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4022 return expand_call (fn, target, target == const0_rtx);
4025 /* Expand expression EXP, which is a call to the bzero builtin. Return
4026 NULL_RTX if we failed; the caller should emit a normal call. */
4028 static rtx
4029 expand_builtin_bzero (tree exp)
4031 tree dest, size;
4033 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4034 return NULL_RTX;
4036 dest = CALL_EXPR_ARG (exp, 0);
4037 size = CALL_EXPR_ARG (exp, 1);
4039 /* New argument list transforming bzero(ptr x, int y) to
4040 memset(ptr x, int 0, size_t y). This is done this way
4041 so that if it isn't expanded inline, we fall back to
4042 calling bzero instead of memset. */
4044 return expand_builtin_memset_args (dest, integer_zero_node,
4045 fold_convert (sizetype, size),
4046 const0_rtx, VOIDmode, exp);
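/* For example, bzero (p, n) is expanded here exactly as
   memset (p, 0, (size_t) n) would be; only the fallback library call,
   if the expansion gives up, still targets bzero itself.  */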
4049 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4050 caller should emit a normal call, otherwise try to get the result
4051 in TARGET, if convenient (and in mode MODE if that's convenient). */
4053 static rtx
4054 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4056 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4057 INTEGER_TYPE, VOID_TYPE))
4059 tree type = TREE_TYPE (exp);
4060 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4061 CALL_EXPR_ARG (exp, 1),
4062 CALL_EXPR_ARG (exp, 2), type);
4063 if (result)
4064 return expand_expr (result, target, mode, EXPAND_NORMAL);
4066 return NULL_RTX;
4069 /* Expand expression EXP, which is a call to the memcmp built-in function.
4070 Return NULL_RTX if we failed and the
4071 caller should emit a normal call, otherwise try to get the result in
4072 TARGET, if convenient (and in mode MODE, if that's convenient). */
4074 static rtx
4075 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4077 if (!validate_arglist (exp,
4078 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4079 return NULL_RTX;
4080 else
4082 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4083 CALL_EXPR_ARG (exp, 1),
4084 CALL_EXPR_ARG (exp, 2));
4085 if (result)
4086 return expand_expr (result, target, mode, EXPAND_NORMAL);
4089 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4091 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4092 rtx result;
4093 rtx insn;
4094 tree arg1 = CALL_EXPR_ARG (exp, 0);
4095 tree arg2 = CALL_EXPR_ARG (exp, 1);
4096 tree len = CALL_EXPR_ARG (exp, 2);
4098 int arg1_align
4099 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4100 int arg2_align
4101 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4102 enum machine_mode insn_mode;
4104 #ifdef HAVE_cmpmemsi
4105 if (HAVE_cmpmemsi)
4106 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4107 else
4108 #endif
4109 #ifdef HAVE_cmpstrnsi
4110 if (HAVE_cmpstrnsi)
4111 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4112 else
4113 #endif
4114 return NULL_RTX;
4116 /* If we don't have POINTER_TYPE, call the function. */
4117 if (arg1_align == 0 || arg2_align == 0)
4118 return NULL_RTX;
4120 /* Make a place to write the result of the instruction. */
4121 result = target;
4122 if (! (result != 0
4123 && REG_P (result) && GET_MODE (result) == insn_mode
4124 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4125 result = gen_reg_rtx (insn_mode);
4127 arg1_rtx = get_memory_rtx (arg1, len);
4128 arg2_rtx = get_memory_rtx (arg2, len);
4129 arg3_rtx = expand_normal (len);
4131 /* Set MEM_SIZE as appropriate. */
4132 if (GET_CODE (arg3_rtx) == CONST_INT)
4134 set_mem_size (arg1_rtx, arg3_rtx);
4135 set_mem_size (arg2_rtx, arg3_rtx);
4138 #ifdef HAVE_cmpmemsi
4139 if (HAVE_cmpmemsi)
4140 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4141 GEN_INT (MIN (arg1_align, arg2_align)));
4142 else
4143 #endif
4144 #ifdef HAVE_cmpstrnsi
4145 if (HAVE_cmpstrnsi)
4146 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4147 GEN_INT (MIN (arg1_align, arg2_align)));
4148 else
4149 #endif
4150 gcc_unreachable ();
4152 if (insn)
4153 emit_insn (insn);
4154 else
4155 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4156 TYPE_MODE (integer_type_node), 3,
4157 XEXP (arg1_rtx, 0), Pmode,
4158 XEXP (arg2_rtx, 0), Pmode,
4159 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4160 TYPE_UNSIGNED (sizetype)),
4161 TYPE_MODE (sizetype));
4163 /* Return the value in the proper mode for this function. */
4164 mode = TYPE_MODE (TREE_TYPE (exp));
4165 if (GET_MODE (result) == mode)
4166 return result;
4167 else if (target != 0)
4169 convert_move (target, result, 0);
4170 return target;
4172 else
4173 return convert_to_mode (mode, result, 0);
4175 #endif
4177 return NULL_RTX;
4180 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4181 if we failed; the caller should emit a normal call, otherwise try to get
4182 the result in TARGET, if convenient. */
4184 static rtx
4185 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4187 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4188 return NULL_RTX;
4189 else
4191 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4192 CALL_EXPR_ARG (exp, 1));
4193 if (result)
4194 return expand_expr (result, target, mode, EXPAND_NORMAL);
4197 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4198 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4199 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4201 rtx arg1_rtx, arg2_rtx;
4202 rtx result, insn = NULL_RTX;
4203 tree fndecl, fn;
4204 tree arg1 = CALL_EXPR_ARG (exp, 0);
4205 tree arg2 = CALL_EXPR_ARG (exp, 1);
4207 int arg1_align
4208 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4209 int arg2_align
4210 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4212 /* If we don't have POINTER_TYPE, call the function. */
4213 if (arg1_align == 0 || arg2_align == 0)
4214 return NULL_RTX;
4216 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4217 arg1 = builtin_save_expr (arg1);
4218 arg2 = builtin_save_expr (arg2);
4220 arg1_rtx = get_memory_rtx (arg1, NULL);
4221 arg2_rtx = get_memory_rtx (arg2, NULL);
4223 #ifdef HAVE_cmpstrsi
4224 /* Try to call cmpstrsi. */
4225 if (HAVE_cmpstrsi)
4227 enum machine_mode insn_mode
4228 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4230 /* Make a place to write the result of the instruction. */
4231 result = target;
4232 if (! (result != 0
4233 && REG_P (result) && GET_MODE (result) == insn_mode
4234 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4235 result = gen_reg_rtx (insn_mode);
4237 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4238 GEN_INT (MIN (arg1_align, arg2_align)));
4240 #endif
4241 #ifdef HAVE_cmpstrnsi
4242 /* Try to determine at least one length and call cmpstrnsi. */
4243 if (!insn && HAVE_cmpstrnsi)
4245 tree len;
4246 rtx arg3_rtx;
4248 enum machine_mode insn_mode
4249 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4250 tree len1 = c_strlen (arg1, 1);
4251 tree len2 = c_strlen (arg2, 1);
4253 if (len1)
4254 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4255 if (len2)
4256 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4258 /* If we don't have a constant length for the first, use the length
4259 of the second, if we know it. We don't require a constant for
4260 this case; some cost analysis could be done if both are available
4261 but neither is constant. For now, assume they're equally cheap,
4262 unless one has side effects. If both strings have constant lengths,
4263 use the smaller. */
4265 if (!len1)
4266 len = len2;
4267 else if (!len2)
4268 len = len1;
4269 else if (TREE_SIDE_EFFECTS (len1))
4270 len = len2;
4271 else if (TREE_SIDE_EFFECTS (len2))
4272 len = len1;
4273 else if (TREE_CODE (len1) != INTEGER_CST)
4274 len = len2;
4275 else if (TREE_CODE (len2) != INTEGER_CST)
4276 len = len1;
4277 else if (tree_int_cst_lt (len1, len2))
4278 len = len1;
4279 else
4280 len = len2;
4282 /* If both arguments have side effects, we cannot optimize. */
4283 if (!len || TREE_SIDE_EFFECTS (len))
4284 goto do_libcall;
4286 arg3_rtx = expand_normal (len);
4288 /* Make a place to write the result of the instruction. */
4289 result = target;
4290 if (! (result != 0
4291 && REG_P (result) && GET_MODE (result) == insn_mode
4292 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4293 result = gen_reg_rtx (insn_mode);
4295 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4296 GEN_INT (MIN (arg1_align, arg2_align)));
4298 #endif
4300 if (insn)
4302 emit_insn (insn);
4304 /* Return the value in the proper mode for this function. */
4305 mode = TYPE_MODE (TREE_TYPE (exp));
4306 if (GET_MODE (result) == mode)
4307 return result;
4308 if (target == 0)
4309 return convert_to_mode (mode, result, 0);
4310 convert_move (target, result, 0);
4311 return target;
4314 /* Expand the library call ourselves using a stabilized argument
4315 list to avoid re-evaluating the function's arguments twice. */
4316 #ifdef HAVE_cmpstrnsi
4317 do_libcall:
4318 #endif
4319 fndecl = get_callee_fndecl (exp);
4320 fn = build_call_expr (fndecl, 2, arg1, arg2);
4321 if (TREE_CODE (fn) == CALL_EXPR)
4322 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4323 return expand_call (fn, target, target == const0_rtx);
4325 #endif
4326 return NULL_RTX;
4329 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4330 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4331 the result in TARGET, if convenient. */
4333 static rtx
4334 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4336 if (!validate_arglist (exp,
4337 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4338 return NULL_RTX;
4339 else
4341 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4342 CALL_EXPR_ARG (exp, 1),
4343 CALL_EXPR_ARG (exp, 2));
4344 if (result)
4345 return expand_expr (result, target, mode, EXPAND_NORMAL);
4348 /* If c_strlen can determine an expression for one of the string
4349 lengths, and it doesn't have side effects, then emit cmpstrnsi
4350 using length MIN(strlen(string)+1, arg3). */
4351 #ifdef HAVE_cmpstrnsi
4352 if (HAVE_cmpstrnsi)
4354 tree len, len1, len2;
4355 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4356 rtx result, insn;
4357 tree fndecl, fn;
4358 tree arg1 = CALL_EXPR_ARG (exp, 0);
4359 tree arg2 = CALL_EXPR_ARG (exp, 1);
4360 tree arg3 = CALL_EXPR_ARG (exp, 2);
4362 int arg1_align
4363 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4364 int arg2_align
4365 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4366 enum machine_mode insn_mode
4367 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4369 len1 = c_strlen (arg1, 1);
4370 len2 = c_strlen (arg2, 1);
4372 if (len1)
4373 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4374 if (len2)
4375 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4377 /* If we don't have a constant length for the first, use the length
4378 of the second, if we know it. We don't require a constant for
4379 this case; some cost analysis could be done if both are available
4380 but neither is constant. For now, assume they're equally cheap,
4381 unless one has side effects. If both strings have constant lengths,
4382 use the smaller. */
4384 if (!len1)
4385 len = len2;
4386 else if (!len2)
4387 len = len1;
4388 else if (TREE_SIDE_EFFECTS (len1))
4389 len = len2;
4390 else if (TREE_SIDE_EFFECTS (len2))
4391 len = len1;
4392 else if (TREE_CODE (len1) != INTEGER_CST)
4393 len = len2;
4394 else if (TREE_CODE (len2) != INTEGER_CST)
4395 len = len1;
4396 else if (tree_int_cst_lt (len1, len2))
4397 len = len1;
4398 else
4399 len = len2;
4401 /* If both arguments have side effects, we cannot optimize. */
4402 if (!len || TREE_SIDE_EFFECTS (len))
4403 return NULL_RTX;
4405 /* The actual new length parameter is MIN(len,arg3). */
4406 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4407 fold_convert (TREE_TYPE (len), arg3));
4409 /* If we don't have POINTER_TYPE, call the function. */
4410 if (arg1_align == 0 || arg2_align == 0)
4411 return NULL_RTX;
4413 /* Make a place to write the result of the instruction. */
4414 result = target;
4415 if (! (result != 0
4416 && REG_P (result) && GET_MODE (result) == insn_mode
4417 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4418 result = gen_reg_rtx (insn_mode);
4420 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4421 arg1 = builtin_save_expr (arg1);
4422 arg2 = builtin_save_expr (arg2);
4423 len = builtin_save_expr (len);
4425 arg1_rtx = get_memory_rtx (arg1, len);
4426 arg2_rtx = get_memory_rtx (arg2, len);
4427 arg3_rtx = expand_normal (len);
4428 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4429 GEN_INT (MIN (arg1_align, arg2_align)));
4430 if (insn)
4432 emit_insn (insn);
4434 /* Return the value in the proper mode for this function. */
4435 mode = TYPE_MODE (TREE_TYPE (exp));
4436 if (GET_MODE (result) == mode)
4437 return result;
4438 if (target == 0)
4439 return convert_to_mode (mode, result, 0);
4440 convert_move (target, result, 0);
4441 return target;
4444 /* Expand the library call ourselves using a stabilized argument
4445 list to avoid re-evaluating the function's arguments twice. */
4446 fndecl = get_callee_fndecl (exp);
4447 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4448 if (TREE_CODE (fn) == CALL_EXPR)
4449 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4450 return expand_call (fn, target, target == const0_rtx);
4452 #endif
4453 return NULL_RTX;
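/* For instance, given strncmp (s, "abc", n) with S not constant, LEN1
   is unknown and LEN2 becomes strlen ("abc") + 1 == 4, so the length
   handed to cmpstrnsi is MIN (4, n); comparing past the constant
   string's terminating NUL could never change the result.  */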
4456 /* Expand expression EXP, which is a call to the strcat builtin.
4457 Return NULL_RTX if we failed; the caller should emit a normal call,
4458 otherwise try to get the result in TARGET, if convenient. */
4460 static rtx
4461 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4463 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4464 return NULL_RTX;
4465 else
4467 tree dst = CALL_EXPR_ARG (exp, 0);
4468 tree src = CALL_EXPR_ARG (exp, 1);
4469 const char *p = c_getstr (src);
4471 /* If the string length is zero, return the dst parameter. */
4472 if (p && *p == '\0')
4473 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4475 if (!optimize_size)
4477 /* See if we can store by pieces into (dst + strlen(dst)). */
4478 tree newsrc, newdst,
4479 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4480 rtx insns;
4482 /* Stabilize the argument list. */
4483 newsrc = builtin_save_expr (src);
4484 dst = builtin_save_expr (dst);
4486 start_sequence ();
4488 /* Create strlen (dst). */
4489 newdst = build_call_expr (strlen_fn, 1, dst);
4490 /* Create (dst p+ strlen (dst)). */
4492 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4493 newdst = builtin_save_expr (newdst);
4495 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4497 end_sequence (); /* Stop sequence. */
4498 return NULL_RTX;
4501 /* Output the entire sequence. */
4502 insns = get_insns ();
4503 end_sequence ();
4504 emit_insn (insns);
4506 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4509 return NULL_RTX;
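/* In effect, when not optimizing for size, a call such as
   strcat (dst, "suffix") is handled above as

       strcpy (dst + strlen (dst), "suffix");

   followed by returning DST (a sketch of the transformation, not the
   literal trees built above).  */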
4513 /* Expand expression EXP, which is a call to the strncat builtin.
4514 Return NULL_RTX if we failed; the caller should emit a normal call,
4515 otherwise try to get the result in TARGET, if convenient. */
4517 static rtx
4518 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4520 if (validate_arglist (exp,
4521 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4523 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4524 CALL_EXPR_ARG (exp, 1),
4525 CALL_EXPR_ARG (exp, 2));
4526 if (result)
4527 return expand_expr (result, target, mode, EXPAND_NORMAL);
4529 return NULL_RTX;
4532 /* Expand expression EXP, which is a call to the strspn builtin.
4533 Return NULL_RTX if we failed; the caller should emit a normal call,
4534 otherwise try to get the result in TARGET, if convenient. */
4536 static rtx
4537 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4539 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4541 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4542 CALL_EXPR_ARG (exp, 1));
4543 if (result)
4544 return expand_expr (result, target, mode, EXPAND_NORMAL);
4546 return NULL_RTX;
4549 /* Expand expression EXP, which is a call to the strcspn builtin.
4550 Return NULL_RTX if we failed; the caller should emit a normal call,
4551 otherwise try to get the result in TARGET, if convenient. */
4553 static rtx
4554 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4556 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4558 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4559 CALL_EXPR_ARG (exp, 1));
4560 if (result)
4561 return expand_expr (result, target, mode, EXPAND_NORMAL);
4563 return NULL_RTX;
4566 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4567 if that's convenient. */
4570 expand_builtin_saveregs (void)
4572 rtx val, seq;
4574 /* Don't do __builtin_saveregs more than once in a function.
4575 Save the result of the first call and reuse it. */
4576 if (saveregs_value != 0)
4577 return saveregs_value;
4579 /* When this function is called, it means that registers must be
4580 saved on entry to this function. So we migrate the call to the
4581 first insn of this function. */
4583 start_sequence ();
4585 /* Do whatever the machine needs done in this case. */
4586 val = targetm.calls.expand_builtin_saveregs ();
4588 seq = get_insns ();
4589 end_sequence ();
4591 saveregs_value = val;
4593 /* Put the insns after the NOTE that starts the function. If this
4594 is inside a start_sequence, make the outer-level insn chain current, so
4595 the code is placed at the start of the function. */
4596 push_topmost_sequence ();
4597 emit_insn_after (seq, entry_of_function ());
4598 pop_topmost_sequence ();
4600 return val;
4603 /* __builtin_args_info (N) returns word N of the arg space info
4604 for the current function. The number and meanings of words
4605 are controlled by the definition of CUMULATIVE_ARGS. */
4607 static rtx
4608 expand_builtin_args_info (tree exp)
4610 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4611 int *word_ptr = (int *) &crtl->args.info;
4613 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4615 if (call_expr_nargs (exp) != 0)
4617 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4618 error ("argument of %<__builtin_args_info%> must be constant");
4619 else
4621 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4623 if (wordnum < 0 || wordnum >= nwords)
4624 error ("argument of %<__builtin_args_info%> out of range");
4625 else
4626 return GEN_INT (word_ptr[wordnum]);
4629 else
4630 error ("missing argument in %<__builtin_args_info%>");
4632 return const0_rtx;
4635 /* Expand a call to __builtin_next_arg. */
4637 static rtx
4638 expand_builtin_next_arg (void)
4640 /* Checking the arguments is already done in fold_builtin_next_arg,
4641 which must be called before this function. */
4642 return expand_binop (ptr_mode, add_optab,
4643 crtl->args.internal_arg_pointer,
4644 crtl->args.arg_offset_rtx,
4645 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4648 /* Make it easier for the backends by protecting the valist argument
4649 from multiple evaluations. */
4651 static tree
4652 stabilize_va_list (tree valist, int needs_lvalue)
4654 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4656 if (TREE_SIDE_EFFECTS (valist))
4657 valist = save_expr (valist);
4659 /* For this case, the backends will be expecting a pointer to
4660 TREE_TYPE (va_list_type_node), but it's possible we've
4661 actually been given an array (an actual va_list_type_node).
4662 So fix it. */
4663 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4665 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4666 valist = build_fold_addr_expr_with_type (valist, p1);
4669 else
4671 tree pt;
4673 if (! needs_lvalue)
4675 if (! TREE_SIDE_EFFECTS (valist))
4676 return valist;
4678 pt = build_pointer_type (va_list_type_node);
4679 valist = fold_build1 (ADDR_EXPR, pt, valist);
4680 TREE_SIDE_EFFECTS (valist) = 1;
4683 if (TREE_SIDE_EFFECTS (valist))
4684 valist = save_expr (valist);
4685 valist = build_fold_indirect_ref (valist);
4688 return valist;
4691 /* The "standard" definition of va_list is void*. */
4693 tree
4694 std_build_builtin_va_list (void)
4696 return ptr_type_node;
4699 /* The "standard" implementation of va_start: just assign `nextarg' to
4700 the variable. */
4702 void
4703 std_expand_builtin_va_start (tree valist, rtx nextarg)
4705 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4706 convert_move (va_r, nextarg, 0);
4709 /* Expand EXP, a call to __builtin_va_start. */
4711 static rtx
4712 expand_builtin_va_start (tree exp)
4714 rtx nextarg;
4715 tree valist;
4717 if (call_expr_nargs (exp) < 2)
4719 error ("too few arguments to function %<va_start%>");
4720 return const0_rtx;
4723 if (fold_builtin_next_arg (exp, true))
4724 return const0_rtx;
4726 nextarg = expand_builtin_next_arg ();
4727 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4729 if (targetm.expand_builtin_va_start)
4730 targetm.expand_builtin_va_start (valist, nextarg);
4731 else
4732 std_expand_builtin_va_start (valist, nextarg);
4734 return const0_rtx;
4737 /* The "standard" implementation of va_arg: read the value from the
4738 current (padded) address and increment by the (padded) size. */
4740 tree
4741 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4743 tree addr, t, type_size, rounded_size, valist_tmp;
4744 unsigned HOST_WIDE_INT align, boundary;
4745 bool indirect;
4747 #ifdef ARGS_GROW_DOWNWARD
4748 /* All of the alignment and movement below is for args-grow-up machines.
4749 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4750 implement their own specialized gimplify_va_arg_expr routines. */
4751 gcc_unreachable ();
4752 #endif
4754 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4755 if (indirect)
4756 type = build_pointer_type (type);
4758 align = PARM_BOUNDARY / BITS_PER_UNIT;
4759 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4761 /* Hoist the valist value into a temporary for the moment. */
4762 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4764 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4765 requires greater alignment, we must perform dynamic alignment. */
4766 if (boundary > align
4767 && !integer_zerop (TYPE_SIZE (type)))
4769 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4770 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4771 valist_tmp, size_int (boundary - 1)));
4772 gimplify_and_add (t, pre_p);
4774 t = fold_convert (sizetype, valist_tmp);
4775 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4776 fold_convert (TREE_TYPE (valist),
4777 fold_build2 (BIT_AND_EXPR, sizetype, t,
4778 size_int (-boundary))));
4779 gimplify_and_add (t, pre_p);
4781 else
4782 boundary = align;
4784 /* If the actual alignment is less than the alignment of the type,
4785 adjust the type accordingly so that we don't assume strict alignment
4786 when dereferencing the pointer. */
4787 boundary *= BITS_PER_UNIT;
4788 if (boundary < TYPE_ALIGN (type))
4790 type = build_variant_type_copy (type);
4791 TYPE_ALIGN (type) = boundary;
4794 /* Compute the rounded size of the type. */
4795 type_size = size_in_bytes (type);
4796 rounded_size = round_up (type_size, align);
4798 /* Reduce rounded_size so it's sharable with the postqueue. */
4799 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4801 /* Get AP. */
4802 addr = valist_tmp;
4803 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4805 /* Small args are padded downward. */
4806 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4807 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4808 size_binop (MINUS_EXPR, rounded_size, type_size));
4809 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4812 /* Compute new value for AP. */
4813 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4814 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4815 gimplify_and_add (t, pre_p);
4817 addr = fold_convert (build_pointer_type (type), addr);
4819 if (indirect)
4820 addr = build_va_arg_indirect_ref (addr);
4822 return build_va_arg_indirect_ref (addr);
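/* Loosely, on an args-grow-upward target the GIMPLE built above
   behaves like the following sketch (ignoring the dynamic
   re-alignment step and indirect arguments):

       addr = ap;
       if (PAD_VARARGS_DOWN && rounded_size <= align)
         addr += rounded_size - type_size;
       ap += rounded_size;
       result = *(TYPE *) addr;

   With PAD_VARARGS_DOWN, a small argument is thus read from the top of
   its padded slot.  */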
4825 /* Build an indirect-ref expression over the given TREE, which represents a
4826 piece of a va_arg() expansion. */
4827 tree
4828 build_va_arg_indirect_ref (tree addr)
4830 addr = build_fold_indirect_ref (addr);
4832 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4833 mf_mark (addr);
4835 return addr;
4838 /* Return a dummy expression of type TYPE in order to keep going after an
4839 error. */
4841 static tree
4842 dummy_object (tree type)
4844 tree t = build_int_cst (build_pointer_type (type), 0);
4845 return build1 (INDIRECT_REF, type, t);
4848 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4849 builtin function, but a very special sort of operator. */
4851 enum gimplify_status
4852 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4854 tree promoted_type, want_va_type, have_va_type;
4855 tree valist = TREE_OPERAND (*expr_p, 0);
4856 tree type = TREE_TYPE (*expr_p);
4857 tree t;
4859 /* Verify that valist is of the proper type. */
4860 want_va_type = va_list_type_node;
4861 have_va_type = TREE_TYPE (valist);
4863 if (have_va_type == error_mark_node)
4864 return GS_ERROR;
4866 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4868 /* If va_list is an array type, the argument may have decayed
4869 to a pointer type, e.g. by being passed to another function.
4870 In that case, unwrap both types so that we can compare the
4871 underlying records. */
4872 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4873 || POINTER_TYPE_P (have_va_type))
4875 want_va_type = TREE_TYPE (want_va_type);
4876 have_va_type = TREE_TYPE (have_va_type);
4880 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4882 error ("first argument to %<va_arg%> not of type %<va_list%>");
4883 return GS_ERROR;
4886 /* Generate a diagnostic for requesting data of a type that cannot
4887 be passed through `...' due to type promotion at the call site. */
4888 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4889 != type)
4891 static bool gave_help;
4893 /* Unfortunately, this is merely undefined, rather than a constraint
4894 violation, so we cannot make this an error. If this call is never
4895 executed, the program is still strictly conforming. */
4896 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4897 type, promoted_type);
4898 if (! gave_help)
4900 gave_help = true;
4901 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4902 promoted_type, type);
4905 /* We can, however, treat "undefined" any way we please.
4906 Call abort to encourage the user to fix the program. */
4907 inform ("if this code is reached, the program will abort");
4908 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4909 append_to_statement_list (t, pre_p);
4911 /* This is dead code, but go ahead and finish so that the
4912 mode of the result comes out right. */
4913 *expr_p = dummy_object (type);
4914 return GS_ALL_DONE;
4916 else
4918 /* Make it easier for the backends by protecting the valist argument
4919 from multiple evaluations. */
4920 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4922 /* For this case, the backends will be expecting a pointer to
4923 TREE_TYPE (va_list_type_node), but it's possible we've
4924 actually been given an array (an actual va_list_type_node).
4925 So fix it. */
4926 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4928 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4929 valist = build_fold_addr_expr_with_type (valist, p1);
4931 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4933 else
4934 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4936 if (!targetm.gimplify_va_arg_expr)
4937 /* FIXME:Once most targets are converted we should merely
4938 assert this is non-null. */
4939 return GS_ALL_DONE;
4941 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4942 return GS_OK;
4946 /* Expand EXP, a call to __builtin_va_end. */
4948 static rtx
4949 expand_builtin_va_end (tree exp)
4951 tree valist = CALL_EXPR_ARG (exp, 0);
4953 /* Evaluate for side effects, if needed. I hate macros that don't
4954 do that. */
4955 if (TREE_SIDE_EFFECTS (valist))
4956 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4958 return const0_rtx;
4961 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4962 builtin rather than just as an assignment in stdarg.h because of the
4963 nastiness of array-type va_list types. */
4965 static rtx
4966 expand_builtin_va_copy (tree exp)
4968 tree dst, src, t;
4970 dst = CALL_EXPR_ARG (exp, 0);
4971 src = CALL_EXPR_ARG (exp, 1);
4973 dst = stabilize_va_list (dst, 1);
4974 src = stabilize_va_list (src, 0);
4976 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4978 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4979 TREE_SIDE_EFFECTS (t) = 1;
4980 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4982 else
4984 rtx dstb, srcb, size;
4986 /* Evaluate to pointers. */
4987 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4988 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4989 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4990 VOIDmode, EXPAND_NORMAL);
4992 dstb = convert_memory_address (Pmode, dstb);
4993 srcb = convert_memory_address (Pmode, srcb);
4995 /* "Dereference" to BLKmode memories. */
4996 dstb = gen_rtx_MEM (BLKmode, dstb);
4997 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4998 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4999 srcb = gen_rtx_MEM (BLKmode, srcb);
5000 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5001 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
5003 /* Copy. */
5004 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5007 return const0_rtx;
5010 /* Expand a call to one of the builtin functions __builtin_frame_address or
5011 __builtin_return_address. */
5013 static rtx
5014 expand_builtin_frame_address (tree fndecl, tree exp)
5016 /* The argument must be a nonnegative integer constant.
5017 It counts the number of frames to scan up the stack.
5018 The value is the return address saved in that frame. */
5019 if (call_expr_nargs (exp) == 0)
5020 /* Warning about missing arg was already issued. */
5021 return const0_rtx;
5022 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
5024 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5025 error ("invalid argument to %<__builtin_frame_address%>");
5026 else
5027 error ("invalid argument to %<__builtin_return_address%>");
5028 return const0_rtx;
5030 else
5032 rtx tem
5033 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5034 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5036 /* Some ports cannot access arbitrary stack frames. */
5037 if (tem == NULL)
5039 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5040 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5041 else
5042 warning (0, "unsupported argument to %<__builtin_return_address%>");
5043 return const0_rtx;
5046 /* For __builtin_frame_address, return what we've got. */
5047 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5048 return tem;
5050 if (!REG_P (tem)
5051 && ! CONSTANT_P (tem))
5052 tem = copy_to_mode_reg (Pmode, tem);
5053 return tem;
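/* For example, __builtin_return_address (0) yields the return address
   of the current frame and __builtin_frame_address (1) the frame
   address of the caller, provided the target can walk that far up the
   stack.  */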
5057 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5058 we failed and the caller should emit a normal call, otherwise try to get
5059 the result in TARGET, if convenient. */
5061 static rtx
5062 expand_builtin_alloca (tree exp, rtx target)
5064 rtx op0;
5065 rtx result;
5067 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5068 should always expand to function calls. These can be intercepted
5069 in libmudflap. */
5070 if (flag_mudflap)
5071 return NULL_RTX;
5073 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5074 return NULL_RTX;
5076 /* Compute the argument. */
5077 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5079 /* Allocate the desired space. */
5080 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5081 result = convert_memory_address (ptr_mode, result);
5083 return result;
5086 /* Expand a call to a bswap builtin with argument ARG0. MODE
5087 is the mode to expand with. */
5089 static rtx
5090 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5092 enum machine_mode mode;
5093 tree arg;
5094 rtx op0;
5096 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5097 return NULL_RTX;
5099 arg = CALL_EXPR_ARG (exp, 0);
5100 mode = TYPE_MODE (TREE_TYPE (arg));
5101 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5103 target = expand_unop (mode, bswap_optab, op0, target, 1);
5105 gcc_assert (target);
5107 return convert_to_mode (mode, target, 0);
5110 /* Expand a call to a unary builtin in EXP.
5111 Return NULL_RTX if a normal call should be emitted rather than expanding the
5112 function in-line. If convenient, the result should be placed in TARGET.
5113 SUBTARGET may be used as the target for computing one of EXP's operands. */
5115 static rtx
5116 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5117 rtx subtarget, optab op_optab)
5119 rtx op0;
5121 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5122 return NULL_RTX;
5124 /* Compute the argument. */
5125 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5126 VOIDmode, EXPAND_NORMAL);
5127 /* Compute op, into TARGET if possible.
5128 Set TARGET to wherever the result comes back. */
5129 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5130 op_optab, op0, target, 1);
5131 gcc_assert (target);
5133 return convert_to_mode (target_mode, target, 0);
5136 /* If the string passed to fputs is a constant and is one character
5137 long, we attempt to transform this call into __builtin_fputc(). */
5139 static rtx
5140 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5142 /* Verify the arguments in the original call. */
5143 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5145 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5146 CALL_EXPR_ARG (exp, 1),
5147 (target == const0_rtx),
5148 unlocked, NULL_TREE);
5149 if (result)
5150 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5152 return NULL_RTX;
5155 /* Expand a call to __builtin_expect. We just return our argument
5156 as the builtin_expect semantics should already have been handled by
5157 the tree branch prediction pass. */
5159 static rtx
5160 expand_builtin_expect (tree exp, rtx target)
5162 tree arg, c;
5164 if (call_expr_nargs (exp) < 2)
5165 return const0_rtx;
5166 arg = CALL_EXPR_ARG (exp, 0);
5167 c = CALL_EXPR_ARG (exp, 1);
5169 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5170 /* When guessing was done, the hints should be already stripped away. */
5171 gcc_assert (!flag_guess_branch_prob
5172 || optimize == 0 || errorcount || sorrycount);
5173 return target;
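/* At the source level the builtin is typically written as, e.g.,

       if (__builtin_expect (ptr == NULL, 0))
         handle_rare_error ();

   (handle_rare_error being a hypothetical user function).  The
   probability hint was consumed by the tree branch prediction pass,
   so only the value of the first argument needs to be produced here.  */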
5176 void
5177 expand_builtin_trap (void)
5179 #ifdef HAVE_trap
5180 if (HAVE_trap)
5181 emit_insn (gen_trap ());
5182 else
5183 #endif
5184 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5185 emit_barrier ();
5188 /* Expand EXP, a call to fabs, fabsf or fabsl.
5189 Return NULL_RTX if a normal call should be emitted rather than expanding
5190 the function inline. If convenient, the result should be placed
5191 in TARGET. SUBTARGET may be used as the target for computing
5192 the operand. */
5194 static rtx
5195 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5197 enum machine_mode mode;
5198 tree arg;
5199 rtx op0;
5201 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5202 return NULL_RTX;
5204 arg = CALL_EXPR_ARG (exp, 0);
5205 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5206 mode = TYPE_MODE (TREE_TYPE (arg));
5207 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5208 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5211 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5212 Return NULL if a normal call should be emitted rather than expanding the
5213 function inline. If convenient, the result should be placed in TARGET.
5214 SUBTARGET may be used as the target for computing the operand. */
5216 static rtx
5217 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5219 rtx op0, op1;
5220 tree arg;
5222 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5223 return NULL_RTX;
5225 arg = CALL_EXPR_ARG (exp, 0);
5226 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5228 arg = CALL_EXPR_ARG (exp, 1);
5229 op1 = expand_normal (arg);
5231 return expand_copysign (op0, op1, target);
5234 /* Create a new constant string literal and return a char* pointer to it.
5235 The STRING_CST value is the LEN characters at STR. */
5236 tree
5237 build_string_literal (int len, const char *str)
5239 tree t, elem, index, type;
5241 t = build_string (len, str);
5242 elem = build_type_variant (char_type_node, 1, 0);
5243 index = build_index_type (size_int (len - 1));
5244 type = build_array_type (elem, index);
5245 TREE_TYPE (t) = type;
5246 TREE_CONSTANT (t) = 1;
5247 TREE_READONLY (t) = 1;
5248 TREE_STATIC (t) = 1;
5250 type = build_pointer_type (elem);
5251 t = build1 (ADDR_EXPR, type,
5252 build4 (ARRAY_REF, elem,
5253 t, integer_zero_node, NULL_TREE, NULL_TREE));
5254 return t;
5257 /* Expand EXP, a call to printf or printf_unlocked.
5258 Return NULL_RTX if a normal call should be emitted rather than transforming
5259 the function inline. If convenient, the result should be placed in
5260 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5261 call. */
5262 static rtx
5263 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5264 bool unlocked)
5266 /* If we're using an unlocked function, assume the other unlocked
5267 functions exist explicitly. */
5268 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5269 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5270 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5271 : implicit_built_in_decls[BUILT_IN_PUTS];
5272 const char *fmt_str;
5273 tree fn = 0;
5274 tree fmt, arg;
5275 int nargs = call_expr_nargs (exp);
5277 /* If the return value is used, don't do the transformation. */
5278 if (target != const0_rtx)
5279 return NULL_RTX;
5281 /* Verify the required arguments in the original call. */
5282 if (nargs == 0)
5283 return NULL_RTX;
5284 fmt = CALL_EXPR_ARG (exp, 0);
5285 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5286 return NULL_RTX;
5288 /* Check whether the format is a literal string constant. */
5289 fmt_str = c_getstr (fmt);
5290 if (fmt_str == NULL)
5291 return NULL_RTX;
5293 if (!init_target_chars ())
5294 return NULL_RTX;
5296 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5297 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5299 if ((nargs != 2)
5300 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5301 return NULL_RTX;
5302 if (fn_puts)
5303 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5305 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5306 else if (strcmp (fmt_str, target_percent_c) == 0)
5308 if ((nargs != 2)
5309 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5310 return NULL_RTX;
5311 if (fn_putchar)
5312 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5314 else
5316 /* We can't handle anything else with % args or %% ... yet. */
5317 if (strchr (fmt_str, target_percent))
5318 return NULL_RTX;
5320 if (nargs > 1)
5321 return NULL_RTX;
5323 /* If the format specifier was "", printf does nothing. */
5324 if (fmt_str[0] == '\0')
5325 return const0_rtx;
5326 /* If the format specifier has length of 1, call putchar. */
5327 if (fmt_str[1] == '\0')
5329 /* Given printf ("c"), where c is any single character,
5330 convert "c"[0] to an int and pass that to the replacement
5331 function. */
5332 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5333 if (fn_putchar)
5334 fn = build_call_expr (fn_putchar, 1, arg);
5336 else
5338 /* If the format specifier was "string\n", call puts("string"). */
5339 size_t len = strlen (fmt_str);
5340 if ((unsigned char)fmt_str[len - 1] == target_newline)
5342 /* Create a NUL-terminated string that's one char shorter
5343 than the original, stripping off the trailing '\n'. */
5344 char *newstr = alloca (len);
5345 memcpy (newstr, fmt_str, len - 1);
5346 newstr[len - 1] = 0;
5347 arg = build_string_literal (len, newstr);
5348 if (fn_puts)
5349 fn = build_call_expr (fn_puts, 1, arg);
5351 else
5352 /* We'd like to arrange to call fputs(string,stdout) here,
5353 but we need stdout and don't have a way to get it yet. */
5354 return NULL_RTX;
5358 if (!fn)
5359 return NULL_RTX;
5360 if (TREE_CODE (fn) == CALL_EXPR)
5361 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5362 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5365 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5366 Return NULL_RTX if a normal call should be emitted rather than transforming
5367 the function inline. If convenient, the result should be placed in
5368 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5369 call. */
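/* A sketch of the rewrites performed below; again they apply only when the
   return value is ignored:

     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);
     fprintf (fp, "");        =>  (FP evaluated for its side effects only)
     fprintf (fp, "text");    =>  fputs ("text", fp);

   Formats with any other '%' directive fall back to a normal call.  */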
5370 static rtx
5371 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5372 bool unlocked)
5374 /* If we're using an unlocked function, assume the other unlocked
5375 functions exist explicitly. */
5376 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5377 : implicit_built_in_decls[BUILT_IN_FPUTC];
5378 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5379 : implicit_built_in_decls[BUILT_IN_FPUTS];
5380 const char *fmt_str;
5381 tree fn = 0;
5382 tree fmt, fp, arg;
5383 int nargs = call_expr_nargs (exp);
5385 /* If the return value is used, don't do the transformation. */
5386 if (target != const0_rtx)
5387 return NULL_RTX;
5389 /* Verify the required arguments in the original call. */
5390 if (nargs < 2)
5391 return NULL_RTX;
5392 fp = CALL_EXPR_ARG (exp, 0);
5393 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5394 return NULL_RTX;
5395 fmt = CALL_EXPR_ARG (exp, 1);
5396 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5397 return NULL_RTX;
5399 /* Check whether the format is a literal string constant. */
5400 fmt_str = c_getstr (fmt);
5401 if (fmt_str == NULL)
5402 return NULL_RTX;
5404 if (!init_target_chars ())
5405 return NULL_RTX;
5407 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5408 if (strcmp (fmt_str, target_percent_s) == 0)
5410 if ((nargs != 3)
5411 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5412 return NULL_RTX;
5413 arg = CALL_EXPR_ARG (exp, 2);
5414 if (fn_fputs)
5415 fn = build_call_expr (fn_fputs, 2, arg, fp);
5417 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5418 else if (strcmp (fmt_str, target_percent_c) == 0)
5420 if ((nargs != 3)
5421 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5422 return NULL_RTX;
5423 arg = CALL_EXPR_ARG (exp, 2);
5424 if (fn_fputc)
5425 fn = build_call_expr (fn_fputc, 2, arg, fp);
5427 else
5429 /* We can't handle anything else with % args or %% ... yet. */
5430 if (strchr (fmt_str, target_percent))
5431 return NULL_RTX;
5433 if (nargs > 2)
5434 return NULL_RTX;
5436 /* If the format specifier was "", fprintf does nothing. */
5437 if (fmt_str[0] == '\0')
5439 /* Evaluate and ignore FILE* argument for side-effects. */
5440 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5441 return const0_rtx;
5444 /* When "string" doesn't contain %, replace all cases of
5445 fprintf(stream,string) with fputs(string,stream). The fputs
5446 builtin will take care of special cases like length == 1. */
5447 if (fn_fputs)
5448 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5451 if (!fn)
5452 return NULL_RTX;
5453 if (TREE_CODE (fn) == CALL_EXPR)
5454 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5455 return expand_expr (fn, target, mode, EXPAND_NORMAL);
5458 /* Expand a call EXP to sprintf. Return NULL_RTX if
5459 a normal call should be emitted rather than expanding the function
5460 inline. If convenient, the result should be placed in TARGET with
5461 mode MODE. */
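/* A sketch of the rewrites performed below:

     sprintf (buf, "hello");   =>  strcpy (buf, "hello");  result is 5 when used
     sprintf (buf, "%s", s);   =>  strcpy (buf, s);        result is strlen (s),
                                                           but only when that is
                                                           a compile-time constant

   In the "%s" case the transformation is done only if the result is unused
   or c_strlen can compute the length of S.  */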
5463 static rtx
5464 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5466 tree dest, fmt;
5467 const char *fmt_str;
5468 int nargs = call_expr_nargs (exp);
5470 /* Verify the required arguments in the original call. */
5471 if (nargs < 2)
5472 return NULL_RTX;
5473 dest = CALL_EXPR_ARG (exp, 0);
5474 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5475 return NULL_RTX;
5476 fmt = CALL_EXPR_ARG (exp, 1);
5477 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5478 return NULL_RTX;
5480 /* Check whether the format is a literal string constant. */
5481 fmt_str = c_getstr (fmt);
5482 if (fmt_str == NULL)
5483 return NULL_RTX;
5485 if (!init_target_chars ())
5486 return NULL_RTX;
5488 /* If the format doesn't contain % args or %%, use strcpy. */
5489 if (strchr (fmt_str, target_percent) == 0)
5491 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5492 tree exp;
5494 if ((nargs > 2) || ! fn)
5495 return NULL_RTX;
5496 expand_expr (build_call_expr (fn, 2, dest, fmt),
5497 const0_rtx, VOIDmode, EXPAND_NORMAL);
5498 if (target == const0_rtx)
5499 return const0_rtx;
5500 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5501 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5503 /* If the format is "%s", use strcpy if the result isn't used. */
5504 else if (strcmp (fmt_str, target_percent_s) == 0)
5506 tree fn, arg, len;
5507 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5509 if (! fn)
5510 return NULL_RTX;
5511 if (nargs != 3)
5512 return NULL_RTX;
5513 arg = CALL_EXPR_ARG (exp, 2);
5514 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5515 return NULL_RTX;
5517 if (target != const0_rtx)
5519 len = c_strlen (arg, 1);
5520 if (! len || TREE_CODE (len) != INTEGER_CST)
5521 return NULL_RTX;
5523 else
5524 len = NULL_TREE;
5526 expand_expr (build_call_expr (fn, 2, dest, arg),
5527 const0_rtx, VOIDmode, EXPAND_NORMAL);
5529 if (target == const0_rtx)
5530 return const0_rtx;
5531 return expand_expr (len, target, mode, EXPAND_NORMAL);
5534 return NULL_RTX;
5537 /* Expand a call to either the entry or exit function profiler. */
5539 static rtx
5540 expand_builtin_profile_func (bool exitp)
5542 rtx this, which;
5544 this = DECL_RTL (current_function_decl);
5545 gcc_assert (MEM_P (this));
5546 this = XEXP (this, 0);
5548 if (exitp)
5549 which = profile_function_exit_libfunc;
5550 else
5551 which = profile_function_entry_libfunc;
5553 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5554 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5556 Pmode);
5558 return const0_rtx;
5561 /* Expand a call to __builtin___clear_cache. */
5563 static rtx
5564 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5566 #ifndef HAVE_clear_cache
5567 #ifdef CLEAR_INSN_CACHE
5568 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5569 does something. Just do the default expansion to a call to
5570 __clear_cache(). */
5571 return NULL_RTX;
5572 #else
5573 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5574 does nothing, so there is no need to call it. */
5575 return const0_rtx;
5576 #endif /* CLEAR_INSN_CACHE */
5577 #else
5578 /* We have a "clear_cache" insn, and it will handle everything. */
5579 tree begin, end;
5580 rtx begin_rtx, end_rtx;
5581 enum insn_code icode;
5583 /* We must not expand to a library call. If we did, any
5584 fallback library function in libgcc that might contain a call to
5585 __builtin___clear_cache() would recurse infinitely. */
5586 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5588 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5589 return const0_rtx;
5592 if (HAVE_clear_cache)
5594 icode = CODE_FOR_clear_cache;
5596 begin = CALL_EXPR_ARG (exp, 0);
5597 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5598 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5599 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5600 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5602 end = CALL_EXPR_ARG (exp, 1);
5603 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5604 end_rtx = convert_memory_address (Pmode, end_rtx);
5605 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5606 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5608 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5610 return const0_rtx;
5611 #endif /* HAVE_clear_cache */
5614 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5616 static rtx
5617 round_trampoline_addr (rtx tramp)
5619 rtx temp, addend, mask;
5621 /* If we don't need too much alignment, we'll have been guaranteed
5622 proper alignment by get_trampoline_type. */
5623 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5624 return tramp;
5626 /* Round address up to desired boundary. */
5627 temp = gen_reg_rtx (Pmode);
5628 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5629 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5631 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5632 temp, 0, OPTAB_LIB_WIDEN);
5633 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5634 temp, 0, OPTAB_LIB_WIDEN);
5636 return tramp;
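/* The two expand_simple_binop calls above implement the familiar
   round-up-to-alignment idiom; with an alignment of A bytes this is

     tramp = (tramp + (A - 1)) & -A;

   which leaves an already A-byte-aligned address unchanged.  */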
5639 static rtx
5640 expand_builtin_init_trampoline (tree exp)
5642 tree t_tramp, t_func, t_chain;
5643 rtx r_tramp, r_func, r_chain;
5644 #ifdef TRAMPOLINE_TEMPLATE
5645 rtx blktramp;
5646 #endif
5648 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5649 POINTER_TYPE, VOID_TYPE))
5650 return NULL_RTX;
5652 t_tramp = CALL_EXPR_ARG (exp, 0);
5653 t_func = CALL_EXPR_ARG (exp, 1);
5654 t_chain = CALL_EXPR_ARG (exp, 2);
5656 r_tramp = expand_normal (t_tramp);
5657 r_func = expand_normal (t_func);
5658 r_chain = expand_normal (t_chain);
5660 /* Generate insns to initialize the trampoline. */
5661 r_tramp = round_trampoline_addr (r_tramp);
5662 #ifdef TRAMPOLINE_TEMPLATE
5663 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5664 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5665 emit_block_move (blktramp, assemble_trampoline_template (),
5666 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5667 #endif
5668 trampolines_created = 1;
5669 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5671 return const0_rtx;
5674 static rtx
5675 expand_builtin_adjust_trampoline (tree exp)
5677 rtx tramp;
5679 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5680 return NULL_RTX;
5682 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5683 tramp = round_trampoline_addr (tramp);
5684 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5685 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5686 #endif
5688 return tramp;
5691 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5692 function. The function first checks whether the back end provides
5693 an insn to implement signbit for the respective mode. If not, it
5694 checks whether the floating point format of the value is such that
5695 the sign bit can be extracted. If that is not the case, the
5696 function returns NULL_RTX to indicate that a normal call should be
5697 emitted rather than expanding the function in-line. EXP is the
5698 expression that is a call to the builtin function; if convenient,
5699 the result should be placed in TARGET. */
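/* A rough sketch of the generic fallback below, assuming an IEEE-style
   single-precision format with the sign in bit 31 and a 32-bit int result:

     signbit (x)  behaves like  (*(const unsigned int *) &x) & 0x80000000u

   i.e. any nonzero value means the sign bit is set.  When the sign bit does
   not fit within the result mode (e.g. double with a 32-bit int result),
   the containing word is shifted right first and then masked with 1.  */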
5700 static rtx
5701 expand_builtin_signbit (tree exp, rtx target)
5703 const struct real_format *fmt;
5704 enum machine_mode fmode, imode, rmode;
5705 HOST_WIDE_INT hi, lo;
5706 tree arg;
5707 int word, bitpos;
5708 enum insn_code icode;
5709 rtx temp;
5711 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5712 return NULL_RTX;
5714 arg = CALL_EXPR_ARG (exp, 0);
5715 fmode = TYPE_MODE (TREE_TYPE (arg));
5716 rmode = TYPE_MODE (TREE_TYPE (exp));
5717 fmt = REAL_MODE_FORMAT (fmode);
5719 arg = builtin_save_expr (arg);
5721 /* Expand the argument, yielding an RTX expression. */
5722 temp = expand_normal (arg);
5724 /* Check if the back end provides an insn that handles signbit for the
5725 argument's mode. */
5726 icode = signbit_optab->handlers [(int) fmode].insn_code;
5727 if (icode != CODE_FOR_nothing)
5729 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5730 emit_unop_insn (icode, target, temp, UNKNOWN);
5731 return target;
5734 /* For floating point formats without a sign bit, implement signbit
5735 as "ARG < 0.0". */
5736 bitpos = fmt->signbit_ro;
5737 if (bitpos < 0)
5739 /* But we can't do this if the format supports signed zero. */
5740 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5741 return NULL_RTX;
5743 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5744 build_real (TREE_TYPE (arg), dconst0));
5745 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5748 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5750 imode = int_mode_for_mode (fmode);
5751 if (imode == BLKmode)
5752 return NULL_RTX;
5753 temp = gen_lowpart (imode, temp);
5755 else
5757 imode = word_mode;
5758 /* Handle targets with different FP word orders. */
5759 if (FLOAT_WORDS_BIG_ENDIAN)
5760 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5761 else
5762 word = bitpos / BITS_PER_WORD;
5763 temp = operand_subword_force (temp, word, fmode);
5764 bitpos = bitpos % BITS_PER_WORD;
5767 /* Force the intermediate word_mode (or narrower) result into a
5768 register. This avoids attempting to create paradoxical SUBREGs
5769 of floating point modes below. */
5770 temp = force_reg (imode, temp);
5772 /* If the bitpos is within the "result mode" lowpart, the operation
5773 can be implemented with a single bitwise AND. Otherwise, we need
5774 a right shift and an AND. */
5776 if (bitpos < GET_MODE_BITSIZE (rmode))
5778 if (bitpos < HOST_BITS_PER_WIDE_INT)
5780 hi = 0;
5781 lo = (HOST_WIDE_INT) 1 << bitpos;
5783 else
5785 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5786 lo = 0;
5789 if (imode != rmode)
5790 temp = gen_lowpart (rmode, temp);
5791 temp = expand_binop (rmode, and_optab, temp,
5792 immed_double_const (lo, hi, rmode),
5793 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5795 else
5797 /* Perform a logical right shift to place the signbit in the least
5798 significant bit, then truncate the result to the desired mode
5799 and mask just this bit. */
5800 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5801 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5802 temp = gen_lowpart (rmode, temp);
5803 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5804 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5807 return temp;
5810 /* Expand fork or exec calls. TARGET is the desired target of the
5811 call. EXP is the call. FN is the
5812 identifier of the actual function. IGNORE is nonzero if the
5813 value is to be ignored. */
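/* Sketch of the effect: with -fprofile-arcs a call such as

     fork ();

   is rewritten into a call to the libgcov wrapper

     __gcov_fork ();

   (and likewise execl becomes __gcov_execl, and so on) so that the profiling
   runtime can keep its counters consistent across the fork or exec; without
   arc profiling the call is expanded as usual.  */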
5815 static rtx
5816 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5818 tree id, decl;
5819 tree call;
5821 /* If we are not profiling, just call the function. */
5822 if (!profile_arc_flag)
5823 return NULL_RTX;
5825 /* Otherwise call the wrapper. This should be equivalent for the rest of
5826 the compiler, so the code does not diverge, and the wrapper may run the
5827 code necessary for keeping the profiling sane. */
5829 switch (DECL_FUNCTION_CODE (fn))
5831 case BUILT_IN_FORK:
5832 id = get_identifier ("__gcov_fork");
5833 break;
5835 case BUILT_IN_EXECL:
5836 id = get_identifier ("__gcov_execl");
5837 break;
5839 case BUILT_IN_EXECV:
5840 id = get_identifier ("__gcov_execv");
5841 break;
5843 case BUILT_IN_EXECLP:
5844 id = get_identifier ("__gcov_execlp");
5845 break;
5847 case BUILT_IN_EXECLE:
5848 id = get_identifier ("__gcov_execle");
5849 break;
5851 case BUILT_IN_EXECVP:
5852 id = get_identifier ("__gcov_execvp");
5853 break;
5855 case BUILT_IN_EXECVE:
5856 id = get_identifier ("__gcov_execve");
5857 break;
5859 default:
5860 gcc_unreachable ();
5863 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5864 DECL_EXTERNAL (decl) = 1;
5865 TREE_PUBLIC (decl) = 1;
5866 DECL_ARTIFICIAL (decl) = 1;
5867 TREE_NOTHROW (decl) = 1;
5868 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5869 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5870 call = rewrite_call_expr (exp, 0, decl, 0);
5871 return expand_call (call, target, ignore);
5876 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5877 the pointer in these functions is void*, the tree optimizers may remove
5878 casts. The mode computed in expand_builtin isn't reliable either, due
5879 to __sync_bool_compare_and_swap.
5881 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5882 group of builtins. This gives us log2 of the mode size. */
5884 static inline enum machine_mode
5885 get_builtin_sync_mode (int fcode_diff)
5887 /* The size is not negotiable, so ask not to get BLKmode in return
5888 if the target indicates that a smaller size would be better. */
5889 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
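/* Worked example: for __sync_fetch_and_add_4 the caller passes
   fcode - BUILT_IN_FETCH_AND_ADD_1 == 2, so we request a mode of
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on a typical 8-bit-byte
   target.  */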
5892 /* Expand the memory expression LOC and return the appropriate memory operand
5893 for the builtin_sync operations. */
5895 static rtx
5896 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5898 rtx addr, mem;
5900 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5902 /* Note that we explicitly do not want any alias information for this
5903 memory, so that we kill all other live memories. Otherwise we don't
5904 satisfy the full barrier semantics of the intrinsic. */
5905 mem = validize_mem (gen_rtx_MEM (mode, addr));
5907 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5908 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5909 MEM_VOLATILE_P (mem) = 1;
5911 return mem;
5914 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5915 EXP is the CALL_EXPR. CODE is the rtx code
5916 that corresponds to the arithmetic or logical operation from the name;
5917 an exception here is that NOT actually means NAND. TARGET is an optional
5918 place for us to store the results; AFTER is true if this is the
5919 xxx_and_fetch form. IGNORE is true if we don't actually care about
5920 the result of the operation at all. */
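/* Sketch of the two flavours, for a suitably sized integer V:

     old_val = __sync_fetch_and_add (&v, n);   -- AFTER is false; the result
                                                  is the value V held before
                                                  the addition
     new_val = __sync_add_and_fetch (&v, n);   -- AFTER is true; the result
                                                  is the updated value

   and, as noted in the comment above, the NOT rtx code here actually stands
   for the __sync_*nand* builtins.  */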
5922 static rtx
5923 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5924 enum rtx_code code, bool after,
5925 rtx target, bool ignore)
5927 rtx val, mem;
5928 enum machine_mode old_mode;
5930 /* Expand the operands. */
5931 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5933 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5934 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5935 of CONST_INTs, where we know the old_mode only from the call argument. */
5936 old_mode = GET_MODE (val);
5937 if (old_mode == VOIDmode)
5938 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5939 val = convert_modes (mode, old_mode, val, 1);
5941 if (ignore)
5942 return expand_sync_operation (mem, val, code);
5943 else
5944 return expand_sync_fetch_operation (mem, val, code, after, target);
5947 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5948 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5949 true if this is the boolean form. TARGET is a place for us to store the
5950 results; this is NOT optional if IS_BOOL is true. */
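/* Sketch of the two forms, for a suitably sized integer V:

     ok   = __sync_bool_compare_and_swap (&v, oldval, newval);
     prev = __sync_val_compare_and_swap (&v, oldval, newval);

   Both atomically store NEWVAL into V only if V still equals OLDVAL; the
   bool form reports whether the store happened, while the val form returns
   the value V held beforehand.  */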
5952 static rtx
5953 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5954 bool is_bool, rtx target)
5956 rtx old_val, new_val, mem;
5957 enum machine_mode old_mode;
5959 /* Expand the operands. */
5960 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5963 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5964 mode, EXPAND_NORMAL);
5965 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5966 of CONST_INTs, where we know the old_mode only from the call argument. */
5967 old_mode = GET_MODE (old_val);
5968 if (old_mode == VOIDmode)
5969 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5970 old_val = convert_modes (mode, old_mode, old_val, 1);
5972 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5973 mode, EXPAND_NORMAL);
5974 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5975 of CONST_INTs, where we know the old_mode only from the call argument. */
5976 old_mode = GET_MODE (new_val);
5977 if (old_mode == VOIDmode)
5978 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5979 new_val = convert_modes (mode, old_mode, new_val, 1);
5981 if (is_bool)
5982 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5983 else
5984 return expand_val_compare_and_swap (mem, old_val, new_val, target);
5987 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5988 general form is actually an atomic exchange, and some targets only
5989 support a reduced form with the second argument being a constant 1.
5990 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5991 the results. */
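/* Sketch of a typical use, a spin-lock acquire:

     while (__sync_lock_test_and_set (&lock, 1))
       ;   /* loop until the previous value was 0 */

   The builtin atomically stores the new value and returns the old contents
   of the lock word; as noted above, some targets only implement the
   restricted form where the stored value is the constant 1.  */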
5993 static rtx
5994 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5995 rtx target)
5997 rtx val, mem;
5998 enum machine_mode old_mode;
6000 /* Expand the operands. */
6001 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6002 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
6003 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6004 of CONST_INTs, where we know the old_mode only from the call argument. */
6005 old_mode = GET_MODE (val);
6006 if (old_mode == VOIDmode)
6007 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
6008 val = convert_modes (mode, old_mode, val, 1);
6010 return expand_sync_lock_test_and_set (mem, val, target);
6013 /* Expand the __sync_synchronize intrinsic. */
6015 static void
6016 expand_builtin_synchronize (void)
6018 tree x;
6020 #ifdef HAVE_memory_barrier
6021 if (HAVE_memory_barrier)
6023 emit_insn (gen_memory_barrier ());
6024 return;
6026 #endif
6028 /* If no explicit memory barrier instruction is available, create an
6029 empty asm stmt with a memory clobber. */
6030 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6031 tree_cons (NULL, build_string (6, "memory"), NULL));
6032 ASM_VOLATILE_P (x) = 1;
6033 expand_asm_expr (x);
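/* The fallback above is the RTL-level counterpart of the classic compiler
   barrier

     __asm__ __volatile__ ("" : : : "memory");

   which emits no instruction but prevents memory accesses from being
   reordered across it by the compiler.  */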
6036 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6038 static void
6039 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6041 enum insn_code icode;
6042 rtx mem, insn;
6043 rtx val = const0_rtx;
6045 /* Expand the operands. */
6046 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6048 /* If there is an explicit operation in the md file, use it. */
6049 icode = sync_lock_release[mode];
6050 if (icode != CODE_FOR_nothing)
6052 if (!insn_data[icode].operand[1].predicate (val, mode))
6053 val = force_reg (mode, val);
6055 insn = GEN_FCN (icode) (mem, val);
6056 if (insn)
6058 emit_insn (insn);
6059 return;
6063 /* Otherwise we can implement this operation by emitting a barrier
6064 followed by a store of zero. */
6065 expand_builtin_synchronize ();
6066 emit_move_insn (mem, val);
6069 /* Expand an expression EXP that calls a built-in function,
6070 with result going to TARGET if that's convenient
6071 (and in mode MODE if that's convenient).
6072 SUBTARGET may be used as the target for computing one of EXP's operands.
6073 IGNORE is nonzero if the value is to be ignored. */
6076 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6077 int ignore)
6079 tree fndecl = get_callee_fndecl (exp);
6080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6081 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6083 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6084 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6086 /* When not optimizing, generate calls to library functions for a certain
6087 set of builtins. */
6088 if (!optimize
6089 && !called_as_built_in (fndecl)
6090 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6091 && fcode != BUILT_IN_ALLOCA)
6092 return expand_call (exp, target, ignore);
6094 /* The built-in function expanders test for target == const0_rtx
6095 to determine whether the function's result will be ignored. */
6096 if (ignore)
6097 target = const0_rtx;
6099 /* If the result of a pure or const built-in function is ignored, and
6100 none of its arguments are volatile, we can avoid expanding the
6101 built-in call and just evaluate the arguments for side-effects. */
6102 if (target == const0_rtx
6103 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6105 bool volatilep = false;
6106 tree arg;
6107 call_expr_arg_iterator iter;
6109 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6110 if (TREE_THIS_VOLATILE (arg))
6112 volatilep = true;
6113 break;
6116 if (! volatilep)
6118 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6119 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6120 return const0_rtx;
6124 switch (fcode)
6126 CASE_FLT_FN (BUILT_IN_FABS):
6127 target = expand_builtin_fabs (exp, target, subtarget);
6128 if (target)
6129 return target;
6130 break;
6132 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6133 target = expand_builtin_copysign (exp, target, subtarget);
6134 if (target)
6135 return target;
6136 break;
6138 /* Just do a normal library call if we were unable to fold
6139 the values. */
6140 CASE_FLT_FN (BUILT_IN_CABS):
6141 break;
6143 CASE_FLT_FN (BUILT_IN_EXP):
6144 CASE_FLT_FN (BUILT_IN_EXP10):
6145 CASE_FLT_FN (BUILT_IN_POW10):
6146 CASE_FLT_FN (BUILT_IN_EXP2):
6147 CASE_FLT_FN (BUILT_IN_EXPM1):
6148 CASE_FLT_FN (BUILT_IN_LOGB):
6149 CASE_FLT_FN (BUILT_IN_LOG):
6150 CASE_FLT_FN (BUILT_IN_LOG10):
6151 CASE_FLT_FN (BUILT_IN_LOG2):
6152 CASE_FLT_FN (BUILT_IN_LOG1P):
6153 CASE_FLT_FN (BUILT_IN_TAN):
6154 CASE_FLT_FN (BUILT_IN_ASIN):
6155 CASE_FLT_FN (BUILT_IN_ACOS):
6156 CASE_FLT_FN (BUILT_IN_ATAN):
6157 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6158 because of possible accuracy problems. */
6159 if (! flag_unsafe_math_optimizations)
6160 break;
6161 CASE_FLT_FN (BUILT_IN_SQRT):
6162 CASE_FLT_FN (BUILT_IN_FLOOR):
6163 CASE_FLT_FN (BUILT_IN_CEIL):
6164 CASE_FLT_FN (BUILT_IN_TRUNC):
6165 CASE_FLT_FN (BUILT_IN_ROUND):
6166 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6167 CASE_FLT_FN (BUILT_IN_RINT):
6168 target = expand_builtin_mathfn (exp, target, subtarget);
6169 if (target)
6170 return target;
6171 break;
6173 CASE_FLT_FN (BUILT_IN_ILOGB):
6174 if (! flag_unsafe_math_optimizations)
6175 break;
6176 CASE_FLT_FN (BUILT_IN_ISINF):
6177 CASE_FLT_FN (BUILT_IN_FINITE):
6178 case BUILT_IN_ISFINITE:
6179 case BUILT_IN_ISNORMAL:
6180 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6181 if (target)
6182 return target;
6183 break;
6185 CASE_FLT_FN (BUILT_IN_LCEIL):
6186 CASE_FLT_FN (BUILT_IN_LLCEIL):
6187 CASE_FLT_FN (BUILT_IN_LFLOOR):
6188 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6189 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6190 if (target)
6191 return target;
6192 break;
6194 CASE_FLT_FN (BUILT_IN_LRINT):
6195 CASE_FLT_FN (BUILT_IN_LLRINT):
6196 CASE_FLT_FN (BUILT_IN_LROUND):
6197 CASE_FLT_FN (BUILT_IN_LLROUND):
6198 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6199 if (target)
6200 return target;
6201 break;
6203 CASE_FLT_FN (BUILT_IN_POW):
6204 target = expand_builtin_pow (exp, target, subtarget);
6205 if (target)
6206 return target;
6207 break;
6209 CASE_FLT_FN (BUILT_IN_POWI):
6210 target = expand_builtin_powi (exp, target, subtarget);
6211 if (target)
6212 return target;
6213 break;
6215 CASE_FLT_FN (BUILT_IN_ATAN2):
6216 CASE_FLT_FN (BUILT_IN_LDEXP):
6217 CASE_FLT_FN (BUILT_IN_SCALB):
6218 CASE_FLT_FN (BUILT_IN_SCALBN):
6219 CASE_FLT_FN (BUILT_IN_SCALBLN):
6220 if (! flag_unsafe_math_optimizations)
6221 break;
6223 CASE_FLT_FN (BUILT_IN_FMOD):
6224 CASE_FLT_FN (BUILT_IN_REMAINDER):
6225 CASE_FLT_FN (BUILT_IN_DREM):
6226 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6227 if (target)
6228 return target;
6229 break;
6231 CASE_FLT_FN (BUILT_IN_CEXPI):
6232 target = expand_builtin_cexpi (exp, target, subtarget);
6233 gcc_assert (target);
6234 return target;
6236 CASE_FLT_FN (BUILT_IN_SIN):
6237 CASE_FLT_FN (BUILT_IN_COS):
6238 if (! flag_unsafe_math_optimizations)
6239 break;
6240 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6241 if (target)
6242 return target;
6243 break;
6245 CASE_FLT_FN (BUILT_IN_SINCOS):
6246 if (! flag_unsafe_math_optimizations)
6247 break;
6248 target = expand_builtin_sincos (exp);
6249 if (target)
6250 return target;
6251 break;
6253 case BUILT_IN_APPLY_ARGS:
6254 return expand_builtin_apply_args ();
6256 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6257 FUNCTION with a copy of the parameters described by
6258 ARGUMENTS, and ARGSIZE. It returns a block of memory
6259 allocated on the stack into which is stored all the registers
6260 that might possibly be used for returning the result of a
6261 function. ARGUMENTS is the value returned by
6262 __builtin_apply_args. ARGSIZE is the number of bytes of
6263 arguments that must be copied. ??? How should this value be
6264 computed? We'll also need a safe worst case value for varargs
6265 functions. */
6266 case BUILT_IN_APPLY:
6267 if (!validate_arglist (exp, POINTER_TYPE,
6268 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6269 && !validate_arglist (exp, REFERENCE_TYPE,
6270 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6271 return const0_rtx;
6272 else
6274 rtx ops[3];
6276 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6277 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6278 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6280 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6283 /* __builtin_return (RESULT) causes the function to return the
6284 value described by RESULT. RESULT is address of the block of
6285 memory returned by __builtin_apply. */
6286 case BUILT_IN_RETURN:
6287 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6288 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6289 return const0_rtx;
6291 case BUILT_IN_SAVEREGS:
6292 return expand_builtin_saveregs ();
6294 case BUILT_IN_ARGS_INFO:
6295 return expand_builtin_args_info (exp);
6297 case BUILT_IN_VA_ARG_PACK:
6298 /* All valid uses of __builtin_va_arg_pack () are removed during
6299 inlining. */
6300 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6301 return const0_rtx;
6303 case BUILT_IN_VA_ARG_PACK_LEN:
6304 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6305 inlining. */
6306 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6307 return const0_rtx;
6309 /* Return the address of the first anonymous stack arg. */
6310 case BUILT_IN_NEXT_ARG:
6311 if (fold_builtin_next_arg (exp, false))
6312 return const0_rtx;
6313 return expand_builtin_next_arg ();
6315 case BUILT_IN_CLEAR_CACHE:
6316 target = expand_builtin___clear_cache (exp);
6317 if (target)
6318 return target;
6319 break;
6321 case BUILT_IN_CLASSIFY_TYPE:
6322 return expand_builtin_classify_type (exp);
6324 case BUILT_IN_CONSTANT_P:
6325 return const0_rtx;
6327 case BUILT_IN_FRAME_ADDRESS:
6328 case BUILT_IN_RETURN_ADDRESS:
6329 return expand_builtin_frame_address (fndecl, exp);
6331 /* Returns the address of the area where the structure is returned.
6332 0 otherwise. */
6333 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6334 if (call_expr_nargs (exp) != 0
6335 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6336 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6337 return const0_rtx;
6338 else
6339 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6341 case BUILT_IN_ALLOCA:
6342 target = expand_builtin_alloca (exp, target);
6343 if (target)
6344 return target;
6345 break;
6347 case BUILT_IN_STACK_SAVE:
6348 return expand_stack_save ();
6350 case BUILT_IN_STACK_RESTORE:
6351 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6352 return const0_rtx;
6354 case BUILT_IN_BSWAP32:
6355 case BUILT_IN_BSWAP64:
6356 target = expand_builtin_bswap (exp, target, subtarget);
6358 if (target)
6359 return target;
6360 break;
6362 CASE_INT_FN (BUILT_IN_FFS):
6363 case BUILT_IN_FFSIMAX:
6364 target = expand_builtin_unop (target_mode, exp, target,
6365 subtarget, ffs_optab);
6366 if (target)
6367 return target;
6368 break;
6370 CASE_INT_FN (BUILT_IN_CLZ):
6371 case BUILT_IN_CLZIMAX:
6372 target = expand_builtin_unop (target_mode, exp, target,
6373 subtarget, clz_optab);
6374 if (target)
6375 return target;
6376 break;
6378 CASE_INT_FN (BUILT_IN_CTZ):
6379 case BUILT_IN_CTZIMAX:
6380 target = expand_builtin_unop (target_mode, exp, target,
6381 subtarget, ctz_optab);
6382 if (target)
6383 return target;
6384 break;
6386 CASE_INT_FN (BUILT_IN_POPCOUNT):
6387 case BUILT_IN_POPCOUNTIMAX:
6388 target = expand_builtin_unop (target_mode, exp, target,
6389 subtarget, popcount_optab);
6390 if (target)
6391 return target;
6392 break;
6394 CASE_INT_FN (BUILT_IN_PARITY):
6395 case BUILT_IN_PARITYIMAX:
6396 target = expand_builtin_unop (target_mode, exp, target,
6397 subtarget, parity_optab);
6398 if (target)
6399 return target;
6400 break;
6402 case BUILT_IN_STRLEN:
6403 target = expand_builtin_strlen (exp, target, target_mode);
6404 if (target)
6405 return target;
6406 break;
6408 case BUILT_IN_STRCPY:
6409 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6410 if (target)
6411 return target;
6412 break;
6414 case BUILT_IN_STRNCPY:
6415 target = expand_builtin_strncpy (exp, target, mode);
6416 if (target)
6417 return target;
6418 break;
6420 case BUILT_IN_STPCPY:
6421 target = expand_builtin_stpcpy (exp, target, mode);
6422 if (target)
6423 return target;
6424 break;
6426 case BUILT_IN_STRCAT:
6427 target = expand_builtin_strcat (fndecl, exp, target, mode);
6428 if (target)
6429 return target;
6430 break;
6432 case BUILT_IN_STRNCAT:
6433 target = expand_builtin_strncat (exp, target, mode);
6434 if (target)
6435 return target;
6436 break;
6438 case BUILT_IN_STRSPN:
6439 target = expand_builtin_strspn (exp, target, mode);
6440 if (target)
6441 return target;
6442 break;
6444 case BUILT_IN_STRCSPN:
6445 target = expand_builtin_strcspn (exp, target, mode);
6446 if (target)
6447 return target;
6448 break;
6450 case BUILT_IN_STRSTR:
6451 target = expand_builtin_strstr (exp, target, mode);
6452 if (target)
6453 return target;
6454 break;
6456 case BUILT_IN_STRPBRK:
6457 target = expand_builtin_strpbrk (exp, target, mode);
6458 if (target)
6459 return target;
6460 break;
6462 case BUILT_IN_INDEX:
6463 case BUILT_IN_STRCHR:
6464 target = expand_builtin_strchr (exp, target, mode);
6465 if (target)
6466 return target;
6467 break;
6469 case BUILT_IN_RINDEX:
6470 case BUILT_IN_STRRCHR:
6471 target = expand_builtin_strrchr (exp, target, mode);
6472 if (target)
6473 return target;
6474 break;
6476 case BUILT_IN_MEMCPY:
6477 target = expand_builtin_memcpy (exp, target, mode);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_MEMPCPY:
6483 target = expand_builtin_mempcpy (exp, target, mode);
6484 if (target)
6485 return target;
6486 break;
6488 case BUILT_IN_MEMMOVE:
6489 target = expand_builtin_memmove (exp, target, mode, ignore);
6490 if (target)
6491 return target;
6492 break;
6494 case BUILT_IN_BCOPY:
6495 target = expand_builtin_bcopy (exp, ignore);
6496 if (target)
6497 return target;
6498 break;
6500 case BUILT_IN_MEMSET:
6501 target = expand_builtin_memset (exp, target, mode);
6502 if (target)
6503 return target;
6504 break;
6506 case BUILT_IN_BZERO:
6507 target = expand_builtin_bzero (exp);
6508 if (target)
6509 return target;
6510 break;
6512 case BUILT_IN_STRCMP:
6513 target = expand_builtin_strcmp (exp, target, mode);
6514 if (target)
6515 return target;
6516 break;
6518 case BUILT_IN_STRNCMP:
6519 target = expand_builtin_strncmp (exp, target, mode);
6520 if (target)
6521 return target;
6522 break;
6524 case BUILT_IN_MEMCHR:
6525 target = expand_builtin_memchr (exp, target, mode);
6526 if (target)
6527 return target;
6528 break;
6530 case BUILT_IN_BCMP:
6531 case BUILT_IN_MEMCMP:
6532 target = expand_builtin_memcmp (exp, target, mode);
6533 if (target)
6534 return target;
6535 break;
6537 case BUILT_IN_SETJMP:
6538 /* This should have been lowered to the builtins below. */
6539 gcc_unreachable ();
6541 case BUILT_IN_SETJMP_SETUP:
6542 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6543 and the receiver label. */
6544 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6546 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6547 VOIDmode, EXPAND_NORMAL);
6548 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6549 rtx label_r = label_rtx (label);
6551 /* This is copied from the handling of non-local gotos. */
6552 expand_builtin_setjmp_setup (buf_addr, label_r);
6553 nonlocal_goto_handler_labels
6554 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6555 nonlocal_goto_handler_labels);
6556 /* ??? Do not let expand_label treat us as such since we would
6557 not want to be both on the list of non-local labels and on
6558 the list of forced labels. */
6559 FORCED_LABEL (label) = 0;
6560 return const0_rtx;
6562 break;
6564 case BUILT_IN_SETJMP_DISPATCHER:
6565 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6566 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6568 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6569 rtx label_r = label_rtx (label);
6571 /* Remove the dispatcher label from the list of non-local labels
6572 since the receiver labels have been added to it above. */
6573 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6574 return const0_rtx;
6576 break;
6578 case BUILT_IN_SETJMP_RECEIVER:
6579 /* __builtin_setjmp_receiver is passed the receiver label. */
6580 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6582 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6583 rtx label_r = label_rtx (label);
6585 expand_builtin_setjmp_receiver (label_r);
6586 return const0_rtx;
6588 break;
6590 /* __builtin_longjmp is passed a pointer to an array of five words.
6591 It's similar to the C library longjmp function but works with
6592 __builtin_setjmp above. */
6593 case BUILT_IN_LONGJMP:
6594 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6596 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6597 VOIDmode, EXPAND_NORMAL);
6598 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6600 if (value != const1_rtx)
6602 error ("%<__builtin_longjmp%> second argument must be 1");
6603 return const0_rtx;
6606 expand_builtin_longjmp (buf_addr, value);
6607 return const0_rtx;
6609 break;
6611 case BUILT_IN_NONLOCAL_GOTO:
6612 target = expand_builtin_nonlocal_goto (exp);
6613 if (target)
6614 return target;
6615 break;
6617 /* This updates the setjmp buffer that is its argument with the value
6618 of the current stack pointer. */
6619 case BUILT_IN_UPDATE_SETJMP_BUF:
6620 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6622 rtx buf_addr
6623 = expand_normal (CALL_EXPR_ARG (exp, 0));
6625 expand_builtin_update_setjmp_buf (buf_addr);
6626 return const0_rtx;
6628 break;
6630 case BUILT_IN_TRAP:
6631 expand_builtin_trap ();
6632 return const0_rtx;
6634 case BUILT_IN_PRINTF:
6635 target = expand_builtin_printf (exp, target, mode, false);
6636 if (target)
6637 return target;
6638 break;
6640 case BUILT_IN_PRINTF_UNLOCKED:
6641 target = expand_builtin_printf (exp, target, mode, true);
6642 if (target)
6643 return target;
6644 break;
6646 case BUILT_IN_FPUTS:
6647 target = expand_builtin_fputs (exp, target, false);
6648 if (target)
6649 return target;
6650 break;
6651 case BUILT_IN_FPUTS_UNLOCKED:
6652 target = expand_builtin_fputs (exp, target, true);
6653 if (target)
6654 return target;
6655 break;
6657 case BUILT_IN_FPRINTF:
6658 target = expand_builtin_fprintf (exp, target, mode, false);
6659 if (target)
6660 return target;
6661 break;
6663 case BUILT_IN_FPRINTF_UNLOCKED:
6664 target = expand_builtin_fprintf (exp, target, mode, true);
6665 if (target)
6666 return target;
6667 break;
6669 case BUILT_IN_SPRINTF:
6670 target = expand_builtin_sprintf (exp, target, mode);
6671 if (target)
6672 return target;
6673 break;
6675 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6676 case BUILT_IN_SIGNBITD32:
6677 case BUILT_IN_SIGNBITD64:
6678 case BUILT_IN_SIGNBITD128:
6679 target = expand_builtin_signbit (exp, target);
6680 if (target)
6681 return target;
6682 break;
6684 /* Various hooks for the DWARF 2 __throw routine. */
6685 case BUILT_IN_UNWIND_INIT:
6686 expand_builtin_unwind_init ();
6687 return const0_rtx;
6688 case BUILT_IN_DWARF_CFA:
6689 return virtual_cfa_rtx;
6690 #ifdef DWARF2_UNWIND_INFO
6691 case BUILT_IN_DWARF_SP_COLUMN:
6692 return expand_builtin_dwarf_sp_column ();
6693 case BUILT_IN_INIT_DWARF_REG_SIZES:
6694 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6695 return const0_rtx;
6696 #endif
6697 case BUILT_IN_FROB_RETURN_ADDR:
6698 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6699 case BUILT_IN_EXTRACT_RETURN_ADDR:
6700 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6701 case BUILT_IN_EH_RETURN:
6702 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6703 CALL_EXPR_ARG (exp, 1));
6704 return const0_rtx;
6705 #ifdef EH_RETURN_DATA_REGNO
6706 case BUILT_IN_EH_RETURN_DATA_REGNO:
6707 return expand_builtin_eh_return_data_regno (exp);
6708 #endif
6709 case BUILT_IN_EXTEND_POINTER:
6710 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6712 case BUILT_IN_VA_START:
6713 return expand_builtin_va_start (exp);
6714 case BUILT_IN_VA_END:
6715 return expand_builtin_va_end (exp);
6716 case BUILT_IN_VA_COPY:
6717 return expand_builtin_va_copy (exp);
6718 case BUILT_IN_EXPECT:
6719 return expand_builtin_expect (exp, target);
6720 case BUILT_IN_PREFETCH:
6721 expand_builtin_prefetch (exp);
6722 return const0_rtx;
6724 case BUILT_IN_PROFILE_FUNC_ENTER:
6725 return expand_builtin_profile_func (false);
6726 case BUILT_IN_PROFILE_FUNC_EXIT:
6727 return expand_builtin_profile_func (true);
6729 case BUILT_IN_INIT_TRAMPOLINE:
6730 return expand_builtin_init_trampoline (exp);
6731 case BUILT_IN_ADJUST_TRAMPOLINE:
6732 return expand_builtin_adjust_trampoline (exp);
6734 case BUILT_IN_FORK:
6735 case BUILT_IN_EXECL:
6736 case BUILT_IN_EXECV:
6737 case BUILT_IN_EXECLP:
6738 case BUILT_IN_EXECLE:
6739 case BUILT_IN_EXECVP:
6740 case BUILT_IN_EXECVE:
6741 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6742 if (target)
6743 return target;
6744 break;
6746 case BUILT_IN_FETCH_AND_ADD_1:
6747 case BUILT_IN_FETCH_AND_ADD_2:
6748 case BUILT_IN_FETCH_AND_ADD_4:
6749 case BUILT_IN_FETCH_AND_ADD_8:
6750 case BUILT_IN_FETCH_AND_ADD_16:
6751 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6752 target = expand_builtin_sync_operation (mode, exp, PLUS,
6753 false, target, ignore);
6754 if (target)
6755 return target;
6756 break;
6758 case BUILT_IN_FETCH_AND_SUB_1:
6759 case BUILT_IN_FETCH_AND_SUB_2:
6760 case BUILT_IN_FETCH_AND_SUB_4:
6761 case BUILT_IN_FETCH_AND_SUB_8:
6762 case BUILT_IN_FETCH_AND_SUB_16:
6763 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6764 target = expand_builtin_sync_operation (mode, exp, MINUS,
6765 false, target, ignore);
6766 if (target)
6767 return target;
6768 break;
6770 case BUILT_IN_FETCH_AND_OR_1:
6771 case BUILT_IN_FETCH_AND_OR_2:
6772 case BUILT_IN_FETCH_AND_OR_4:
6773 case BUILT_IN_FETCH_AND_OR_8:
6774 case BUILT_IN_FETCH_AND_OR_16:
6775 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6776 target = expand_builtin_sync_operation (mode, exp, IOR,
6777 false, target, ignore);
6778 if (target)
6779 return target;
6780 break;
6782 case BUILT_IN_FETCH_AND_AND_1:
6783 case BUILT_IN_FETCH_AND_AND_2:
6784 case BUILT_IN_FETCH_AND_AND_4:
6785 case BUILT_IN_FETCH_AND_AND_8:
6786 case BUILT_IN_FETCH_AND_AND_16:
6787 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6788 target = expand_builtin_sync_operation (mode, exp, AND,
6789 false, target, ignore);
6790 if (target)
6791 return target;
6792 break;
6794 case BUILT_IN_FETCH_AND_XOR_1:
6795 case BUILT_IN_FETCH_AND_XOR_2:
6796 case BUILT_IN_FETCH_AND_XOR_4:
6797 case BUILT_IN_FETCH_AND_XOR_8:
6798 case BUILT_IN_FETCH_AND_XOR_16:
6799 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6800 target = expand_builtin_sync_operation (mode, exp, XOR,
6801 false, target, ignore);
6802 if (target)
6803 return target;
6804 break;
6806 case BUILT_IN_FETCH_AND_NAND_1:
6807 case BUILT_IN_FETCH_AND_NAND_2:
6808 case BUILT_IN_FETCH_AND_NAND_4:
6809 case BUILT_IN_FETCH_AND_NAND_8:
6810 case BUILT_IN_FETCH_AND_NAND_16:
6811 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6812 target = expand_builtin_sync_operation (mode, exp, NOT,
6813 false, target, ignore);
6814 if (target)
6815 return target;
6816 break;
6818 case BUILT_IN_ADD_AND_FETCH_1:
6819 case BUILT_IN_ADD_AND_FETCH_2:
6820 case BUILT_IN_ADD_AND_FETCH_4:
6821 case BUILT_IN_ADD_AND_FETCH_8:
6822 case BUILT_IN_ADD_AND_FETCH_16:
6823 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6824 target = expand_builtin_sync_operation (mode, exp, PLUS,
6825 true, target, ignore);
6826 if (target)
6827 return target;
6828 break;
6830 case BUILT_IN_SUB_AND_FETCH_1:
6831 case BUILT_IN_SUB_AND_FETCH_2:
6832 case BUILT_IN_SUB_AND_FETCH_4:
6833 case BUILT_IN_SUB_AND_FETCH_8:
6834 case BUILT_IN_SUB_AND_FETCH_16:
6835 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6836 target = expand_builtin_sync_operation (mode, exp, MINUS,
6837 true, target, ignore);
6838 if (target)
6839 return target;
6840 break;
6842 case BUILT_IN_OR_AND_FETCH_1:
6843 case BUILT_IN_OR_AND_FETCH_2:
6844 case BUILT_IN_OR_AND_FETCH_4:
6845 case BUILT_IN_OR_AND_FETCH_8:
6846 case BUILT_IN_OR_AND_FETCH_16:
6847 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6848 target = expand_builtin_sync_operation (mode, exp, IOR,
6849 true, target, ignore);
6850 if (target)
6851 return target;
6852 break;
6854 case BUILT_IN_AND_AND_FETCH_1:
6855 case BUILT_IN_AND_AND_FETCH_2:
6856 case BUILT_IN_AND_AND_FETCH_4:
6857 case BUILT_IN_AND_AND_FETCH_8:
6858 case BUILT_IN_AND_AND_FETCH_16:
6859 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6860 target = expand_builtin_sync_operation (mode, exp, AND,
6861 true, target, ignore);
6862 if (target)
6863 return target;
6864 break;
6866 case BUILT_IN_XOR_AND_FETCH_1:
6867 case BUILT_IN_XOR_AND_FETCH_2:
6868 case BUILT_IN_XOR_AND_FETCH_4:
6869 case BUILT_IN_XOR_AND_FETCH_8:
6870 case BUILT_IN_XOR_AND_FETCH_16:
6871 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6872 target = expand_builtin_sync_operation (mode, exp, XOR,
6873 true, target, ignore);
6874 if (target)
6875 return target;
6876 break;
6878 case BUILT_IN_NAND_AND_FETCH_1:
6879 case BUILT_IN_NAND_AND_FETCH_2:
6880 case BUILT_IN_NAND_AND_FETCH_4:
6881 case BUILT_IN_NAND_AND_FETCH_8:
6882 case BUILT_IN_NAND_AND_FETCH_16:
6883 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6884 target = expand_builtin_sync_operation (mode, exp, NOT,
6885 true, target, ignore);
6886 if (target)
6887 return target;
6888 break;
6890 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6891 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6892 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6893 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6894 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6895 if (mode == VOIDmode)
6896 mode = TYPE_MODE (boolean_type_node);
6897 if (!target || !register_operand (target, mode))
6898 target = gen_reg_rtx (mode);
6900 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6901 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6902 if (target)
6903 return target;
6904 break;
6906 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6907 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6908 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6909 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6910 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6912 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6913 if (target)
6914 return target;
6915 break;
6917 case BUILT_IN_LOCK_TEST_AND_SET_1:
6918 case BUILT_IN_LOCK_TEST_AND_SET_2:
6919 case BUILT_IN_LOCK_TEST_AND_SET_4:
6920 case BUILT_IN_LOCK_TEST_AND_SET_8:
6921 case BUILT_IN_LOCK_TEST_AND_SET_16:
6922 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6923 target = expand_builtin_lock_test_and_set (mode, exp, target);
6924 if (target)
6925 return target;
6926 break;
6928 case BUILT_IN_LOCK_RELEASE_1:
6929 case BUILT_IN_LOCK_RELEASE_2:
6930 case BUILT_IN_LOCK_RELEASE_4:
6931 case BUILT_IN_LOCK_RELEASE_8:
6932 case BUILT_IN_LOCK_RELEASE_16:
6933 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6934 expand_builtin_lock_release (mode, exp);
6935 return const0_rtx;
6937 case BUILT_IN_SYNCHRONIZE:
6938 expand_builtin_synchronize ();
6939 return const0_rtx;
6941 case BUILT_IN_OBJECT_SIZE:
6942 return expand_builtin_object_size (exp);
6944 case BUILT_IN_MEMCPY_CHK:
6945 case BUILT_IN_MEMPCPY_CHK:
6946 case BUILT_IN_MEMMOVE_CHK:
6947 case BUILT_IN_MEMSET_CHK:
6948 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6949 if (target)
6950 return target;
6951 break;
6953 case BUILT_IN_STRCPY_CHK:
6954 case BUILT_IN_STPCPY_CHK:
6955 case BUILT_IN_STRNCPY_CHK:
6956 case BUILT_IN_STRCAT_CHK:
6957 case BUILT_IN_STRNCAT_CHK:
6958 case BUILT_IN_SNPRINTF_CHK:
6959 case BUILT_IN_VSNPRINTF_CHK:
6960 maybe_emit_chk_warning (exp, fcode);
6961 break;
6963 case BUILT_IN_SPRINTF_CHK:
6964 case BUILT_IN_VSPRINTF_CHK:
6965 maybe_emit_sprintf_chk_warning (exp, fcode);
6966 break;
6968 default: /* just do library call, if unknown builtin */
6969 break;
6972 /* The switch statement above can drop through to cause the function
6973 to be called normally. */
6974 return expand_call (exp, target, ignore);
6977 /* Determine whether a tree node represents a call to a built-in
6978 function. If the tree T is a call to a built-in function with
6979 the right number of arguments of the appropriate types, return
6980 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6981 Otherwise the return value is END_BUILTINS. */
6983 enum built_in_function
6984 builtin_mathfn_code (const_tree t)
6986 const_tree fndecl, arg, parmlist;
6987 const_tree argtype, parmtype;
6988 const_call_expr_arg_iterator iter;
6990 if (TREE_CODE (t) != CALL_EXPR
6991 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6992 return END_BUILTINS;
6994 fndecl = get_callee_fndecl (t);
6995 if (fndecl == NULL_TREE
6996 || TREE_CODE (fndecl) != FUNCTION_DECL
6997 || ! DECL_BUILT_IN (fndecl)
6998 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6999 return END_BUILTINS;
7001 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7002 init_const_call_expr_arg_iterator (t, &iter);
7003 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7005 /* If a function doesn't take a variable number of arguments,
7006 the last element in the list will have type `void'. */
7007 parmtype = TREE_VALUE (parmlist);
7008 if (VOID_TYPE_P (parmtype))
7010 if (more_const_call_expr_args_p (&iter))
7011 return END_BUILTINS;
7012 return DECL_FUNCTION_CODE (fndecl);
7015 if (! more_const_call_expr_args_p (&iter))
7016 return END_BUILTINS;
7018 arg = next_const_call_expr_arg (&iter);
7019 argtype = TREE_TYPE (arg);
7021 if (SCALAR_FLOAT_TYPE_P (parmtype))
7023 if (! SCALAR_FLOAT_TYPE_P (argtype))
7024 return END_BUILTINS;
7026 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7028 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7029 return END_BUILTINS;
7031 else if (POINTER_TYPE_P (parmtype))
7033 if (! POINTER_TYPE_P (argtype))
7034 return END_BUILTINS;
7036 else if (INTEGRAL_TYPE_P (parmtype))
7038 if (! INTEGRAL_TYPE_P (argtype))
7039 return END_BUILTINS;
7041 else
7042 return END_BUILTINS;
7045 /* Variable-length argument list. */
7046 return DECL_FUNCTION_CODE (fndecl);
7049 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7050 evaluate to a constant. */
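/* A few illustrative cases (the names are placeholders, not real decls):

     __builtin_constant_p (3 * 7)        ->  1   constant expression
     __builtin_constant_p ("abc")        ->  1   address of a string literal
     __builtin_constant_p (some_struct)  ->  0   aggregate that is not a
                                                 literal constructor

   Anything that cannot be decided yet is left untouched (NULL_TREE) so the
   question can still be answered during RTL optimization.  */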
7052 static tree
7053 fold_builtin_constant_p (tree arg)
7055 /* We return 1 for a numeric type that's known to be a constant
7056 value at compile-time or for an aggregate type that's a
7057 literal constant. */
7058 STRIP_NOPS (arg);
7060 /* If we know this is a constant, return the constant one. */
7061 if (CONSTANT_CLASS_P (arg)
7062 || (TREE_CODE (arg) == CONSTRUCTOR
7063 && TREE_CONSTANT (arg)))
7064 return integer_one_node;
7065 if (TREE_CODE (arg) == ADDR_EXPR)
7067 tree op = TREE_OPERAND (arg, 0);
7068 if (TREE_CODE (op) == STRING_CST
7069 || (TREE_CODE (op) == ARRAY_REF
7070 && integer_zerop (TREE_OPERAND (op, 1))
7071 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7072 return integer_one_node;
7075 /* If this expression has side effects, show we don't know it to be a
7076 constant. Likewise if it's a pointer or aggregate type since in
7077 those cases we only want literals, as those are only optimized
7078 when generating RTL, not later.
7079 And finally, if we are compiling an initializer, not code, we
7080 need to return a definite result now; there's not going to be any
7081 more optimization done. */
7082 if (TREE_SIDE_EFFECTS (arg)
7083 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7084 || POINTER_TYPE_P (TREE_TYPE (arg))
7085 || cfun == 0
7086 || folding_initializer)
7087 return integer_zero_node;
7089 return NULL_TREE;
7092 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7093 return it as a truthvalue. */
7095 static tree
7096 build_builtin_expect_predicate (tree pred, tree expected)
7098 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7100 fn = built_in_decls[BUILT_IN_EXPECT];
7101 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7102 ret_type = TREE_TYPE (TREE_TYPE (fn));
7103 pred_type = TREE_VALUE (arg_types);
7104 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7106 pred = fold_convert (pred_type, pred);
7107 expected = fold_convert (expected_type, expected);
7108 call_expr = build_call_expr (fn, 2, pred, expected);
7110 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7111 build_int_cst (ret_type, 0));
7114 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7115 NULL_TREE if no simplification is possible. */
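/* Sketch of the distribution step implemented below: for

     __builtin_expect (a && b, 1)

   the expectation is pushed onto both operands, producing the equivalent of

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   (each operand wrapped via build_builtin_expect_predicate), so the hint
   survives the short-circuit lowering of the logical operator.  */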
7117 static tree
7118 fold_builtin_expect (tree arg0, tree arg1)
7120 tree inner, fndecl;
7121 enum tree_code code;
7123 /* If this is a builtin_expect within a builtin_expect, keep the
7124 inner one. See through a comparison against a constant. It
7125 might have been added to create a truthvalue. */
7126 inner = arg0;
7127 if (COMPARISON_CLASS_P (inner)
7128 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7129 inner = TREE_OPERAND (inner, 0);
7131 if (TREE_CODE (inner) == CALL_EXPR
7132 && (fndecl = get_callee_fndecl (inner))
7133 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7134 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7135 return arg0;
7137 /* Distribute the expected value over short-circuiting operators.
7138 See through the cast from truthvalue_type_node to long. */
7139 inner = arg0;
7140 while (TREE_CODE (inner) == NOP_EXPR
7141 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7142 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7143 inner = TREE_OPERAND (inner, 0);
7145 code = TREE_CODE (inner);
7146 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7148 tree op0 = TREE_OPERAND (inner, 0);
7149 tree op1 = TREE_OPERAND (inner, 1);
7151 op0 = build_builtin_expect_predicate (op0, arg1);
7152 op1 = build_builtin_expect_predicate (op1, arg1);
7153 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7155 return fold_convert (TREE_TYPE (arg0), inner);
7158 /* If the argument isn't invariant then there's nothing else we can do. */
7159 if (!TREE_CONSTANT (arg0))
7160 return NULL_TREE;
7162 /* If we expect that a comparison against the argument will fold to
7163 a constant, return the constant. In practice, this means a true
7164 constant or the address of a non-weak symbol. */
7165 inner = arg0;
7166 STRIP_NOPS (inner);
7167 if (TREE_CODE (inner) == ADDR_EXPR)
7171 inner = TREE_OPERAND (inner, 0);
7173 while (TREE_CODE (inner) == COMPONENT_REF
7174 || TREE_CODE (inner) == ARRAY_REF);
7175 if (DECL_P (inner) && DECL_WEAK (inner))
7176 return NULL_TREE;
7179 /* Otherwise, ARG0 already has the proper type for the return value. */
7180 return arg0;
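/* A sketch of the distribution done above; the exact tree shapes are
   simplified for illustration:
     __builtin_expect (a && b, 1)
       -> (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   so the hint reaches both operands of the short-circuit.  A nested
   __builtin_expect keeps the inner call, and a constant first argument
   simply folds to itself.  */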
7183 /* Fold a call to __builtin_classify_type with argument ARG. */
7185 static tree
7186 fold_builtin_classify_type (tree arg)
7188 if (arg == 0)
7189 return build_int_cst (NULL_TREE, no_type_class);
7191 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7194 /* Fold a call to __builtin_strlen with argument ARG. */
7196 static tree
7197 fold_builtin_strlen (tree arg)
7199 if (!validate_arg (arg, POINTER_TYPE))
7200 return NULL_TREE;
7201 else
7203 tree len = c_strlen (arg, 0);
7205 if (len)
7207 /* Convert from the internal "sizetype" type to "size_t". */
7208 if (size_type_node)
7209 len = fold_convert (size_type_node, len);
7210 return len;
7213 return NULL_TREE;
7217 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7219 static tree
7220 fold_builtin_inf (tree type, int warn)
7222 REAL_VALUE_TYPE real;
7224 /* __builtin_inff is intended to be usable to define INFINITY on all
7225 targets. If an infinity is not available, INFINITY expands "to a
7226 positive constant of type float that overflows at translation
7227 time", footnote "In this case, using INFINITY will violate the
7228 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7229 Thus we pedwarn to ensure this constraint violation is
7230 diagnosed. */
7231 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7232 pedwarn ("target format does not support infinity");
7234 real_inf (&real);
7235 return build_real (type, real);
7238 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7240 static tree
7241 fold_builtin_nan (tree arg, tree type, int quiet)
7243 REAL_VALUE_TYPE real;
7244 const char *str;
7246 if (!validate_arg (arg, POINTER_TYPE))
7247 return NULL_TREE;
7248 str = c_getstr (arg);
7249 if (!str)
7250 return NULL_TREE;
7252 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7253 return NULL_TREE;
7255 return build_real (type, real);
7258 /* Return true if the floating point expression T has an integer value.
7259 We also allow +Inf, -Inf and NaN to be considered integer values. */
7261 static bool
7262 integer_valued_real_p (tree t)
7264 switch (TREE_CODE (t))
7266 case FLOAT_EXPR:
7267 return true;
7269 case ABS_EXPR:
7270 case SAVE_EXPR:
7271 return integer_valued_real_p (TREE_OPERAND (t, 0));
7273 case COMPOUND_EXPR:
7274 case MODIFY_EXPR:
7275 case BIND_EXPR:
7276 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7278 case PLUS_EXPR:
7279 case MINUS_EXPR:
7280 case MULT_EXPR:
7281 case MIN_EXPR:
7282 case MAX_EXPR:
7283 return integer_valued_real_p (TREE_OPERAND (t, 0))
7284 && integer_valued_real_p (TREE_OPERAND (t, 1));
7286 case COND_EXPR:
7287 return integer_valued_real_p (TREE_OPERAND (t, 1))
7288 && integer_valued_real_p (TREE_OPERAND (t, 2));
7290 case REAL_CST:
7291 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7293 case NOP_EXPR:
7295 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7296 if (TREE_CODE (type) == INTEGER_TYPE)
7297 return true;
7298 if (TREE_CODE (type) == REAL_TYPE)
7299 return integer_valued_real_p (TREE_OPERAND (t, 0));
7300 break;
7303 case CALL_EXPR:
7304 switch (builtin_mathfn_code (t))
7306 CASE_FLT_FN (BUILT_IN_CEIL):
7307 CASE_FLT_FN (BUILT_IN_FLOOR):
7308 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7309 CASE_FLT_FN (BUILT_IN_RINT):
7310 CASE_FLT_FN (BUILT_IN_ROUND):
7311 CASE_FLT_FN (BUILT_IN_TRUNC):
7312 return true;
7314 CASE_FLT_FN (BUILT_IN_FMIN):
7315 CASE_FLT_FN (BUILT_IN_FMAX):
7316 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7317 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7319 default:
7320 break;
7322 break;
7324 default:
7325 break;
7327 return false;
7330 /* FNDECL is assumed to be a builtin where truncation can be propagated
7331 across (for instance floor((double)f) == (double)floorf (f)).
7332 Do the transformation for a call with argument ARG. */
7334 static tree
7335 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7337 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7339 if (!validate_arg (arg, REAL_TYPE))
7340 return NULL_TREE;
7342 /* Integer rounding functions are idempotent. */
7343 if (fcode == builtin_mathfn_code (arg))
7344 return arg;
7346 /* If argument is already integer valued, and we don't need to worry
7347 about setting errno, there's no need to perform rounding. */
7348 if (! flag_errno_math && integer_valued_real_p (arg))
7349 return arg;
7351 if (optimize)
7353 tree arg0 = strip_float_extensions (arg);
7354 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7355 tree newtype = TREE_TYPE (arg0);
7356 tree decl;
7358 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7359 && (decl = mathfn_built_in (newtype, fcode)))
7360 return fold_convert (ftype,
7361 build_call_expr (decl, 1,
7362 fold_convert (newtype, arg0)));
7364 return NULL_TREE;
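/* Illustrative examples of the narrowing above (assuming a float f and
   the narrower builtin being available):
     floor ((double) f)  -> (double) floorf (f)
     trunc (trunc (x))   -> trunc (x)
   and, when errno need not be honored, floor (x) with x already known
   to be integer valued folds to x itself.  */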
7367 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7368 the argument, for instance lround((double)f) -> lroundf (f).
7369 Do the transformation for a call with argument ARG. */
7371 static tree
7372 fold_fixed_mathfn (tree fndecl, tree arg)
7374 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7376 if (!validate_arg (arg, REAL_TYPE))
7377 return NULL_TREE;
7379 /* If argument is already integer valued, and we don't need to worry
7380 about setting errno, there's no need to perform rounding. */
7381 if (! flag_errno_math && integer_valued_real_p (arg))
7382 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7384 if (optimize)
7386 tree ftype = TREE_TYPE (arg);
7387 tree arg0 = strip_float_extensions (arg);
7388 tree newtype = TREE_TYPE (arg0);
7389 tree decl;
7391 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7392 && (decl = mathfn_built_in (newtype, fcode)))
7393 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7396 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7397 sizeof (long long) == sizeof (long). */
7398 if (TYPE_PRECISION (long_long_integer_type_node)
7399 == TYPE_PRECISION (long_integer_type_node))
7401 tree newfn = NULL_TREE;
7402 switch (fcode)
7404 CASE_FLT_FN (BUILT_IN_LLCEIL):
7405 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7406 break;
7408 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7409 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7410 break;
7412 CASE_FLT_FN (BUILT_IN_LLROUND):
7413 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7414 break;
7416 CASE_FLT_FN (BUILT_IN_LLRINT):
7417 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7418 break;
7420 default:
7421 break;
7424 if (newfn)
7426 tree newcall = build_call_expr (newfn, 1, arg);
7427 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7431 return NULL_TREE;
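/* Roughly, the fold above gives (again assuming a float f and the
   narrower builtins being available):
     lround ((double) f)  -> lroundf (f)
     llround (x)          -> (long long) lround (x)
   the latter only when long and long long have the same precision.  */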
7434 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7435 return type. Return NULL_TREE if no simplification can be made. */
7437 static tree
7438 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7440 tree res;
7442 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7443 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7444 return NULL_TREE;
7446 /* Calculate the result when the argument is a constant. */
7447 if (TREE_CODE (arg) == COMPLEX_CST
7448 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7449 type, mpfr_hypot)))
7450 return res;
7452 if (TREE_CODE (arg) == COMPLEX_EXPR)
7454 tree real = TREE_OPERAND (arg, 0);
7455 tree imag = TREE_OPERAND (arg, 1);
7457 /* If either part is zero, cabs is fabs of the other. */
7458 if (real_zerop (real))
7459 return fold_build1 (ABS_EXPR, type, imag);
7460 if (real_zerop (imag))
7461 return fold_build1 (ABS_EXPR, type, real);
7463 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7464 if (flag_unsafe_math_optimizations
7465 && operand_equal_p (real, imag, OEP_PURE_SAME))
7467 const REAL_VALUE_TYPE sqrt2_trunc
7468 = real_value_truncate (TYPE_MODE (type),
7469 *get_real_const (rv_sqrt2));
7470 STRIP_NOPS (real);
7471 return fold_build2 (MULT_EXPR, type,
7472 fold_build1 (ABS_EXPR, type, real),
7473 build_real (type, sqrt2_trunc));
7477 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7478 if (TREE_CODE (arg) == NEGATE_EXPR
7479 || TREE_CODE (arg) == CONJ_EXPR)
7480 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7482 /* Don't do this when optimizing for size. */
7483 if (flag_unsafe_math_optimizations
7484 && optimize && !optimize_size)
7486 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7488 if (sqrtfn != NULL_TREE)
7490 tree rpart, ipart, result;
7492 arg = builtin_save_expr (arg);
7494 rpart = fold_build1 (REALPART_EXPR, type, arg);
7495 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7497 rpart = builtin_save_expr (rpart);
7498 ipart = builtin_save_expr (ipart);
7500 result = fold_build2 (PLUS_EXPR, type,
7501 fold_build2 (MULT_EXPR, type,
7502 rpart, rpart),
7503 fold_build2 (MULT_EXPR, type,
7504 ipart, ipart));
7506 return build_call_expr (sqrtfn, 1, result);
7510 return NULL_TREE;
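/* Illustrative behaviour of the cabs folds above:
     cabs (x + 0.0i)             -> fabs (x)
     cabs (-z), cabs (conj (z))  -> cabs (z)
   and, with -funsafe-math-optimizations (and not optimizing for size),
     cabs (z) -> sqrt (creal*creal + cimag*cimag)
   where creal/cimag stand in for the saved real and imaginary parts.  */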
7513 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7514 Return NULL_TREE if no simplification can be made. */
7516 static tree
7517 fold_builtin_sqrt (tree arg, tree type)
7520 enum built_in_function fcode;
7521 tree res;
7523 if (!validate_arg (arg, REAL_TYPE))
7524 return NULL_TREE;
7526 /* Calculate the result when the argument is a constant. */
7527 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7528 return res;
7530 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7531 fcode = builtin_mathfn_code (arg);
7532 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7534 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7535 arg = fold_build2 (MULT_EXPR, type,
7536 CALL_EXPR_ARG (arg, 0),
7537 build_real (type, dconsthalf));
7538 return build_call_expr (expfn, 1, arg);
7541 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7542 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7544 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7546 if (powfn)
7548 tree arg0 = CALL_EXPR_ARG (arg, 0);
7549 tree tree_root;
7550 /* The inner root was either sqrt or cbrt. */
7551 REAL_VALUE_TYPE dconstroot =
7552 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7554 /* Adjust for the outer root. */
7555 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7556 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7557 tree_root = build_real (type, dconstroot);
7558 return build_call_expr (powfn, 2, arg0, tree_root);
7562 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7563 if (flag_unsafe_math_optimizations
7564 && (fcode == BUILT_IN_POW
7565 || fcode == BUILT_IN_POWF
7566 || fcode == BUILT_IN_POWL))
7568 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7569 tree arg0 = CALL_EXPR_ARG (arg, 0);
7570 tree arg1 = CALL_EXPR_ARG (arg, 1);
7571 tree narg1;
7572 if (!tree_expr_nonnegative_p (arg0))
7573 arg0 = build1 (ABS_EXPR, type, arg0);
7574 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7575 build_real (type, dconsthalf));
7576 return build_call_expr (powfn, 2, arg0, narg1);
7579 return NULL_TREE;
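/* A sketch of the unsafe-math sqrt rewrites above:
     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (sqrt (x))    -> pow (x, 0.25)
     sqrt (cbrt (x))    -> pow (x, 1.0/6.0)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)
   Constant arguments are instead evaluated directly with MPFR.  */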
7582 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7583 Return NULL_TREE if no simplification can be made. */
7585 static tree
7586 fold_builtin_cbrt (tree arg, tree type)
7588 const enum built_in_function fcode = builtin_mathfn_code (arg);
7589 tree res;
7591 if (!validate_arg (arg, REAL_TYPE))
7592 return NULL_TREE;
7594 /* Calculate the result when the argument is a constant. */
7595 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7596 return res;
7598 if (flag_unsafe_math_optimizations)
7600 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7601 if (BUILTIN_EXPONENT_P (fcode))
7603 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7604 const REAL_VALUE_TYPE third_trunc =
7605 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7606 arg = fold_build2 (MULT_EXPR, type,
7607 CALL_EXPR_ARG (arg, 0),
7608 build_real (type, third_trunc));
7609 return build_call_expr (expfn, 1, arg);
7612 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7613 if (BUILTIN_SQRT_P (fcode))
7615 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7617 if (powfn)
7619 tree arg0 = CALL_EXPR_ARG (arg, 0);
7620 tree tree_root;
7621 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7623 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7624 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7625 tree_root = build_real (type, dconstroot);
7626 return build_call_expr (powfn, 2, arg0, tree_root);
7630 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7631 if (BUILTIN_CBRT_P (fcode))
7633 tree arg0 = CALL_EXPR_ARG (arg, 0);
7634 if (tree_expr_nonnegative_p (arg0))
7636 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7638 if (powfn)
7640 tree tree_root;
7641 REAL_VALUE_TYPE dconstroot;
7643 real_arithmetic (&dconstroot, MULT_EXPR,
7644 get_real_const (rv_third),
7645 get_real_const (rv_third));
7646 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7647 tree_root = build_real (type, dconstroot);
7648 return build_call_expr (powfn, 2, arg0, tree_root);
7653 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7654 if (fcode == BUILT_IN_POW
7655 || fcode == BUILT_IN_POWF
7656 || fcode == BUILT_IN_POWL)
7658 tree arg00 = CALL_EXPR_ARG (arg, 0);
7659 tree arg01 = CALL_EXPR_ARG (arg, 1);
7660 if (tree_expr_nonnegative_p (arg00))
7662 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7663 const REAL_VALUE_TYPE dconstroot
7664 = real_value_truncate (TYPE_MODE (type),
7665 *get_real_const (rv_third));
7666 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7667 build_real (type, dconstroot));
7668 return build_call_expr (powfn, 2, arg00, narg01);
7672 return NULL_TREE;
7675 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7676 TYPE is the type of the return value. Return NULL_TREE if no
7677 simplification can be made. */
7679 static tree
7680 fold_builtin_cos (tree arg, tree type, tree fndecl)
7682 tree res, narg;
7684 if (!validate_arg (arg, REAL_TYPE))
7685 return NULL_TREE;
7687 /* Calculate the result when the argument is a constant. */
7688 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7689 return res;
7691 /* Optimize cos(-x) into cos (x). */
7692 if ((narg = fold_strip_sign_ops (arg)))
7693 return build_call_expr (fndecl, 1, narg);
7695 return NULL_TREE;
7698 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7699 Return NULL_TREE if no simplification can be made. */
7701 static tree
7702 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7704 if (validate_arg (arg, REAL_TYPE))
7706 tree res, narg;
7708 /* Calculate the result when the argument is a constant. */
7709 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7710 return res;
7712 /* Optimize cosh(-x) into cosh (x). */
7713 if ((narg = fold_strip_sign_ops (arg)))
7714 return build_call_expr (fndecl, 1, narg);
7717 return NULL_TREE;
7720 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7721 Return NULL_TREE if no simplification can be made. */
7723 static tree
7724 fold_builtin_tan (tree arg, tree type)
7726 enum built_in_function fcode;
7727 tree res;
7729 if (!validate_arg (arg, REAL_TYPE))
7730 return NULL_TREE;
7732 /* Calculate the result when the argument is a constant. */
7733 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7734 return res;
7736 /* Optimize tan(atan(x)) = x. */
7737 fcode = builtin_mathfn_code (arg);
7738 if (flag_unsafe_math_optimizations
7739 && (fcode == BUILT_IN_ATAN
7740 || fcode == BUILT_IN_ATANF
7741 || fcode == BUILT_IN_ATANL))
7742 return CALL_EXPR_ARG (arg, 0);
7744 return NULL_TREE;
7747 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7748 NULL_TREE if no simplification can be made. */
7750 static tree
7751 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7753 tree type;
7754 tree res, fn, call;
7756 if (!validate_arg (arg0, REAL_TYPE)
7757 || !validate_arg (arg1, POINTER_TYPE)
7758 || !validate_arg (arg2, POINTER_TYPE))
7759 return NULL_TREE;
7761 type = TREE_TYPE (arg0);
7763 /* Calculate the result when the argument is a constant. */
7764 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7765 return res;
7767 /* Canonicalize sincos to cexpi. */
7768 if (!TARGET_C99_FUNCTIONS)
7769 return NULL_TREE;
7770 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7771 if (!fn)
7772 return NULL_TREE;
7774 call = build_call_expr (fn, 1, arg0);
7775 call = builtin_save_expr (call);
7777 return build2 (COMPOUND_EXPR, type,
7778 build2 (MODIFY_EXPR, void_type_node,
7779 build_fold_indirect_ref (arg1),
7780 build1 (IMAGPART_EXPR, type, call)),
7781 build2 (MODIFY_EXPR, void_type_node,
7782 build_fold_indirect_ref (arg2),
7783 build1 (REALPART_EXPR, type, call)));
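/* Roughly, the canonicalization above turns
     sincos (x, &s, &c);
   into (when cexpi is available on the target)
     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;
   where tmp is a compiler temporary named here only for illustration.  */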
7786 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7787 NULL_TREE if no simplification can be made. */
7789 static tree
7790 fold_builtin_cexp (tree arg0, tree type)
7792 tree rtype;
7793 tree realp, imagp, ifn;
7795 if (!validate_arg (arg0, COMPLEX_TYPE))
7796 return NULL_TREE;
7798 rtype = TREE_TYPE (TREE_TYPE (arg0));
7800 /* In case we can figure out the real part of arg0 and it is constant zero,
7801 fold to cexpi. */
7802 if (!TARGET_C99_FUNCTIONS)
7803 return NULL_TREE;
7804 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7805 if (!ifn)
7806 return NULL_TREE;
7808 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7809 && real_zerop (realp))
7811 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7812 return build_call_expr (ifn, 1, narg);
7815 /* In case we can easily decompose the real and imaginary parts, split cexp
7816 to exp (r) * cexpi (i). */
7817 if (flag_unsafe_math_optimizations
7818 && realp)
7820 tree rfn, rcall, icall;
7822 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7823 if (!rfn)
7824 return NULL_TREE;
7826 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7827 if (!imagp)
7828 return NULL_TREE;
7830 icall = build_call_expr (ifn, 1, imagp);
7831 icall = builtin_save_expr (icall);
7832 rcall = build_call_expr (rfn, 1, realp);
7833 rcall = builtin_save_expr (rcall);
7834 return fold_build2 (COMPLEX_EXPR, type,
7835 fold_build2 (MULT_EXPR, rtype,
7836 rcall,
7837 fold_build1 (REALPART_EXPR, rtype, icall)),
7838 fold_build2 (MULT_EXPR, rtype,
7839 rcall,
7840 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7843 return NULL_TREE;
7846 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7847 Return NULL_TREE if no simplification can be made. */
7849 static tree
7850 fold_builtin_trunc (tree fndecl, tree arg)
7852 if (!validate_arg (arg, REAL_TYPE))
7853 return NULL_TREE;
7855 /* Optimize trunc of constant value. */
7856 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7858 REAL_VALUE_TYPE r, x;
7859 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7861 x = TREE_REAL_CST (arg);
7862 real_trunc (&r, TYPE_MODE (type), &x);
7863 return build_real (type, r);
7866 return fold_trunc_transparent_mathfn (fndecl, arg);
7869 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7870 Return NULL_TREE if no simplification can be made. */
7872 static tree
7873 fold_builtin_floor (tree fndecl, tree arg)
7875 if (!validate_arg (arg, REAL_TYPE))
7876 return NULL_TREE;
7878 /* Optimize floor of constant value. */
7879 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7881 REAL_VALUE_TYPE x;
7883 x = TREE_REAL_CST (arg);
7884 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7886 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7887 REAL_VALUE_TYPE r;
7889 real_floor (&r, TYPE_MODE (type), &x);
7890 return build_real (type, r);
7894 /* Fold floor (x) where x is nonnegative to trunc (x). */
7895 if (tree_expr_nonnegative_p (arg))
7897 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7898 if (truncfn)
7899 return build_call_expr (truncfn, 1, arg);
7902 return fold_trunc_transparent_mathfn (fndecl, arg);
7905 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7906 Return NULL_TREE if no simplification can be made. */
7908 static tree
7909 fold_builtin_ceil (tree fndecl, tree arg)
7911 if (!validate_arg (arg, REAL_TYPE))
7912 return NULL_TREE;
7914 /* Optimize ceil of constant value. */
7915 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7917 REAL_VALUE_TYPE x;
7919 x = TREE_REAL_CST (arg);
7920 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7922 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7923 REAL_VALUE_TYPE r;
7925 real_ceil (&r, TYPE_MODE (type), &x);
7926 return build_real (type, r);
7930 return fold_trunc_transparent_mathfn (fndecl, arg);
7933 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7934 Return NULL_TREE if no simplification can be made. */
7936 static tree
7937 fold_builtin_round (tree fndecl, tree arg)
7939 if (!validate_arg (arg, REAL_TYPE))
7940 return NULL_TREE;
7942 /* Optimize round of constant value. */
7943 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7945 REAL_VALUE_TYPE x;
7947 x = TREE_REAL_CST (arg);
7948 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7950 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7951 REAL_VALUE_TYPE r;
7953 real_round (&r, TYPE_MODE (type), &x);
7954 return build_real (type, r);
7958 return fold_trunc_transparent_mathfn (fndecl, arg);
7961 /* Fold function call to builtin lround, lroundf or lroundl (or the
7962 corresponding long long versions) and other rounding functions. ARG
7963 is the argument to the call. Return NULL_TREE if no simplification
7964 can be made. */
7966 static tree
7967 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7969 if (!validate_arg (arg, REAL_TYPE))
7970 return NULL_TREE;
7972 /* Optimize lround of constant value. */
7973 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7975 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7977 if (real_isfinite (&x))
7979 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7980 tree ftype = TREE_TYPE (arg);
7981 unsigned HOST_WIDE_INT lo2;
7982 HOST_WIDE_INT hi, lo;
7983 REAL_VALUE_TYPE r;
7985 switch (DECL_FUNCTION_CODE (fndecl))
7987 CASE_FLT_FN (BUILT_IN_LFLOOR):
7988 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7989 real_floor (&r, TYPE_MODE (ftype), &x);
7990 break;
7992 CASE_FLT_FN (BUILT_IN_LCEIL):
7993 CASE_FLT_FN (BUILT_IN_LLCEIL):
7994 real_ceil (&r, TYPE_MODE (ftype), &x);
7995 break;
7997 CASE_FLT_FN (BUILT_IN_LROUND):
7998 CASE_FLT_FN (BUILT_IN_LLROUND):
7999 real_round (&r, TYPE_MODE (ftype), &x);
8000 break;
8002 default:
8003 gcc_unreachable ();
8006 REAL_VALUE_TO_INT (&lo, &hi, r);
8007 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
8008 return build_int_cst_wide (itype, lo2, hi);
8012 switch (DECL_FUNCTION_CODE (fndecl))
8014 CASE_FLT_FN (BUILT_IN_LFLOOR):
8015 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8016 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8017 if (tree_expr_nonnegative_p (arg))
8018 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
8019 arg);
8020 break;
8021 default:;
8024 return fold_fixed_mathfn (fndecl, arg);
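/* Illustrative results of the integer rounding folds above (assuming
   the value fits the target long):
     lround (2.5)   -> 3   (ties round away from zero)
     lceil (-1.25)  -> -1
     lfloor (x)     -> (long) x   when x is known to be nonnegative.  */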
8027 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8028 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8029 the argument to the call. Return NULL_TREE if no simplification can
8030 be made. */
8032 static tree
8033 fold_builtin_bitop (tree fndecl, tree arg)
8035 if (!validate_arg (arg, INTEGER_TYPE))
8036 return NULL_TREE;
8038 /* Optimize for constant argument. */
8039 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8041 HOST_WIDE_INT hi, width, result;
8042 unsigned HOST_WIDE_INT lo;
8043 tree type;
8045 type = TREE_TYPE (arg);
8046 width = TYPE_PRECISION (type);
8047 lo = TREE_INT_CST_LOW (arg);
8049 /* Clear all the bits that are beyond the type's precision. */
8050 if (width > HOST_BITS_PER_WIDE_INT)
8052 hi = TREE_INT_CST_HIGH (arg);
8053 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8054 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8056 else
8058 hi = 0;
8059 if (width < HOST_BITS_PER_WIDE_INT)
8060 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8063 switch (DECL_FUNCTION_CODE (fndecl))
8065 CASE_INT_FN (BUILT_IN_FFS):
8066 if (lo != 0)
8067 result = exact_log2 (lo & -lo) + 1;
8068 else if (hi != 0)
8069 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8070 else
8071 result = 0;
8072 break;
8074 CASE_INT_FN (BUILT_IN_CLZ):
8075 if (hi != 0)
8076 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8077 else if (lo != 0)
8078 result = width - floor_log2 (lo) - 1;
8079 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8080 result = width;
8081 break;
8083 CASE_INT_FN (BUILT_IN_CTZ):
8084 if (lo != 0)
8085 result = exact_log2 (lo & -lo);
8086 else if (hi != 0)
8087 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8088 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8089 result = width;
8090 break;
8092 CASE_INT_FN (BUILT_IN_POPCOUNT):
8093 result = 0;
8094 while (lo)
8095 result++, lo &= lo - 1;
8096 while (hi)
8097 result++, hi &= hi - 1;
8098 break;
8100 CASE_INT_FN (BUILT_IN_PARITY):
8101 result = 0;
8102 while (lo)
8103 result++, lo &= lo - 1;
8104 while (hi)
8105 result++, hi &= hi - 1;
8106 result &= 1;
8107 break;
8109 default:
8110 gcc_unreachable ();
8113 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8116 return NULL_TREE;
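/* Sample constant folds performed above, assuming a 32-bit int:
     __builtin_ffs (0)         -> 0
     __builtin_ffs (8)         -> 4
     __builtin_clz (1)         -> 31
     __builtin_ctz (8)         -> 3
     __builtin_popcount (0xff) -> 8
     __builtin_parity (7)      -> 1  */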
8119 /* Fold a function call to builtin bswap32 or bswap64 with argument ARG.
8120 Return NULL_TREE if no simplification can be made. */
8121 static tree
8122 fold_builtin_bswap (tree fndecl, tree arg)
8124 if (! validate_arg (arg, INTEGER_TYPE))
8125 return NULL_TREE;
8127 /* Optimize constant value. */
8128 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8130 HOST_WIDE_INT hi, width, r_hi = 0;
8131 unsigned HOST_WIDE_INT lo, r_lo = 0;
8132 tree type;
8134 type = TREE_TYPE (arg);
8135 width = TYPE_PRECISION (type);
8136 lo = TREE_INT_CST_LOW (arg);
8137 hi = TREE_INT_CST_HIGH (arg);
8139 switch (DECL_FUNCTION_CODE (fndecl))
8141 case BUILT_IN_BSWAP32:
8142 case BUILT_IN_BSWAP64:
8144 int s;
8146 for (s = 0; s < width; s += 8)
8148 int d = width - s - 8;
8149 unsigned HOST_WIDE_INT byte;
8151 if (s < HOST_BITS_PER_WIDE_INT)
8152 byte = (lo >> s) & 0xff;
8153 else
8154 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8156 if (d < HOST_BITS_PER_WIDE_INT)
8157 r_lo |= byte << d;
8158 else
8159 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8163 break;
8165 default:
8166 gcc_unreachable ();
8169 if (width < HOST_BITS_PER_WIDE_INT)
8170 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8171 else
8172 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8175 return NULL_TREE;
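/* For example, the byte-reversal loop above folds
     __builtin_bswap32 (0x12345678) -> 0x78563412
   and __builtin_bswap64 likewise reverses all eight bytes.  */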
8178 /* Return true if EXPR is the real constant contained in VALUE. */
8180 static bool
8181 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8183 STRIP_NOPS (expr);
8185 return ((TREE_CODE (expr) == REAL_CST
8186 && !TREE_OVERFLOW (expr)
8187 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8188 || (TREE_CODE (expr) == COMPLEX_CST
8189 && real_dconstp (TREE_REALPART (expr), value)
8190 && real_zerop (TREE_IMAGPART (expr))));
8193 /* A subroutine of fold_builtin to fold the various logarithmic
8194 functions. Return NULL_TREE if no simplification can be made.
8195 FUNC is the corresponding MPFR logarithm function. */
8197 static tree
8198 fold_builtin_logarithm (tree fndecl, tree arg,
8199 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8201 if (validate_arg (arg, REAL_TYPE))
8203 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8204 tree res;
8205 const enum built_in_function fcode = builtin_mathfn_code (arg);
8207 /* Optimize log(e) = 1.0. We're never passed an exact 'e';
8208 instead we'll look for 'e' truncated to MODE. So only do
8209 this if flag_unsafe_math_optimizations is set. */
8210 if (flag_unsafe_math_optimizations && func == mpfr_log)
8212 const REAL_VALUE_TYPE e_truncated =
8213 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8214 if (real_dconstp (arg, &e_truncated))
8215 return build_real (type, dconst1);
8218 /* Calculate the result when the argument is a constant. */
8219 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8220 return res;
8222 /* Special case, optimize logN(expN(x)) = x. */
8223 if (flag_unsafe_math_optimizations
8224 && ((func == mpfr_log
8225 && (fcode == BUILT_IN_EXP
8226 || fcode == BUILT_IN_EXPF
8227 || fcode == BUILT_IN_EXPL))
8228 || (func == mpfr_log2
8229 && (fcode == BUILT_IN_EXP2
8230 || fcode == BUILT_IN_EXP2F
8231 || fcode == BUILT_IN_EXP2L))
8232 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8233 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8235 /* Optimize logN(func()) for various exponential functions. We
8236 want to determine the value "x" and the power "exponent" in
8237 order to transform logN(x**exponent) into exponent*logN(x). */
8238 if (flag_unsafe_math_optimizations)
8240 tree exponent = 0, x = 0;
8242 switch (fcode)
8244 CASE_FLT_FN (BUILT_IN_EXP):
8245 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8246 x = build_real (type,
8247 real_value_truncate (TYPE_MODE (type),
8248 *get_real_const (rv_e)));
8249 exponent = CALL_EXPR_ARG (arg, 0);
8250 break;
8251 CASE_FLT_FN (BUILT_IN_EXP2):
8252 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8253 x = build_real (type, dconst2);
8254 exponent = CALL_EXPR_ARG (arg, 0);
8255 break;
8256 CASE_FLT_FN (BUILT_IN_EXP10):
8257 CASE_FLT_FN (BUILT_IN_POW10):
8258 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8260 REAL_VALUE_TYPE dconst10;
8261 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8262 x = build_real (type, dconst10);
8264 exponent = CALL_EXPR_ARG (arg, 0);
8265 break;
8266 CASE_FLT_FN (BUILT_IN_SQRT):
8267 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8268 x = CALL_EXPR_ARG (arg, 0);
8269 exponent = build_real (type, dconsthalf);
8270 break;
8271 CASE_FLT_FN (BUILT_IN_CBRT):
8272 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8273 x = CALL_EXPR_ARG (arg, 0);
8274 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8275 *get_real_const (rv_third)));
8276 break;
8277 CASE_FLT_FN (BUILT_IN_POW):
8278 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8279 x = CALL_EXPR_ARG (arg, 0);
8280 exponent = CALL_EXPR_ARG (arg, 1);
8281 break;
8282 default:
8283 break;
8286 /* Now perform the optimization. */
8287 if (x && exponent)
8289 tree logfn = build_call_expr (fndecl, 1, x);
8290 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8295 return NULL_TREE;
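/* A sketch of the unsafe-math logarithm rewrites above:
     log (exp (x))     -> x
     log (sqrt (x))    -> 0.5 * log (x)
     log (cbrt (x))    -> (1/3) * log (x)
     log (pow (x, y))  -> y * log (x)
     log2 (exp2 (x))   -> x
   and log of 'e' (truncated to the type) folds to 1.0.  */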
8298 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8299 NULL_TREE if no simplification can be made. */
8301 static tree
8302 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8304 tree res, narg0, narg1;
8306 if (!validate_arg (arg0, REAL_TYPE)
8307 || !validate_arg (arg1, REAL_TYPE))
8308 return NULL_TREE;
8310 /* Calculate the result when the argument is a constant. */
8311 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8312 return res;
8314 /* If either argument to hypot has a negate or abs, strip that off.
8315 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8316 narg0 = fold_strip_sign_ops (arg0);
8317 narg1 = fold_strip_sign_ops (arg1);
8318 if (narg0 || narg1)
8320 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8321 narg1 ? narg1 : arg1);
8324 /* If either argument is zero, hypot is fabs of the other. */
8325 if (real_zerop (arg0))
8326 return fold_build1 (ABS_EXPR, type, arg1);
8327 else if (real_zerop (arg1))
8328 return fold_build1 (ABS_EXPR, type, arg0);
8330 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8331 if (flag_unsafe_math_optimizations
8332 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8334 const REAL_VALUE_TYPE sqrt2_trunc
8335 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8336 return fold_build2 (MULT_EXPR, type,
8337 fold_build1 (ABS_EXPR, type, arg0),
8338 build_real (type, sqrt2_trunc));
8341 return NULL_TREE;
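/* Illustrative hypot folds from above:
     hypot (x, 0.0)        -> fabs (x)
     hypot (-x, fabs (y))  -> hypot (x, y)
     hypot (x, x)          -> fabs (x) * sqrt (2)   (unsafe math only)  */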
8345 /* Fold a builtin function call to pow, powf, or powl. Return
8346 NULL_TREE if no simplification can be made. */
8347 static tree
8348 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8350 tree res;
8352 if (!validate_arg (arg0, REAL_TYPE)
8353 || !validate_arg (arg1, REAL_TYPE))
8354 return NULL_TREE;
8356 /* Calculate the result when the argument is a constant. */
8357 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8358 return res;
8360 /* Optimize pow(1.0,y) = 1.0. */
8361 if (real_onep (arg0))
8362 return omit_one_operand (type, build_real (type, dconst1), arg1);
8364 if (TREE_CODE (arg1) == REAL_CST
8365 && !TREE_OVERFLOW (arg1))
8367 REAL_VALUE_TYPE cint;
8368 REAL_VALUE_TYPE c;
8369 HOST_WIDE_INT n;
8371 c = TREE_REAL_CST (arg1);
8373 /* Optimize pow(x,0.0) = 1.0. */
8374 if (REAL_VALUES_EQUAL (c, dconst0))
8375 return omit_one_operand (type, build_real (type, dconst1),
8376 arg0);
8378 /* Optimize pow(x,1.0) = x. */
8379 if (REAL_VALUES_EQUAL (c, dconst1))
8380 return arg0;
8382 /* Optimize pow(x,-1.0) = 1.0/x. */
8383 if (REAL_VALUES_EQUAL (c, dconstm1))
8384 return fold_build2 (RDIV_EXPR, type,
8385 build_real (type, dconst1), arg0);
8387 /* Optimize pow(x,0.5) = sqrt(x). */
8388 if (flag_unsafe_math_optimizations
8389 && REAL_VALUES_EQUAL (c, dconsthalf))
8391 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8393 if (sqrtfn != NULL_TREE)
8394 return build_call_expr (sqrtfn, 1, arg0);
8397 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8398 if (flag_unsafe_math_optimizations)
8400 const REAL_VALUE_TYPE dconstroot
8401 = real_value_truncate (TYPE_MODE (type),
8402 *get_real_const (rv_third));
8404 if (REAL_VALUES_EQUAL (c, dconstroot))
8406 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8407 if (cbrtfn != NULL_TREE)
8408 return build_call_expr (cbrtfn, 1, arg0);
8412 /* Check for an integer exponent. */
8413 n = real_to_integer (&c);
8414 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8415 if (real_identical (&c, &cint))
8417 /* Attempt to evaluate pow at compile-time. */
8418 if (TREE_CODE (arg0) == REAL_CST
8419 && !TREE_OVERFLOW (arg0))
8421 REAL_VALUE_TYPE x;
8422 bool inexact;
8424 x = TREE_REAL_CST (arg0);
8425 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8426 if (flag_unsafe_math_optimizations || !inexact)
8427 return build_real (type, x);
8430 /* Strip sign ops from even integer powers. */
8431 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8433 tree narg0 = fold_strip_sign_ops (arg0);
8434 if (narg0)
8435 return build_call_expr (fndecl, 2, narg0, arg1);
8440 if (flag_unsafe_math_optimizations)
8442 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8444 /* Optimize pow(expN(x),y) = expN(x*y). */
8445 if (BUILTIN_EXPONENT_P (fcode))
8447 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8448 tree arg = CALL_EXPR_ARG (arg0, 0);
8449 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8450 return build_call_expr (expfn, 1, arg);
8453 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8454 if (BUILTIN_SQRT_P (fcode))
8456 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8457 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8458 build_real (type, dconsthalf));
8459 return build_call_expr (fndecl, 2, narg0, narg1);
8462 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8463 if (BUILTIN_CBRT_P (fcode))
8465 tree arg = CALL_EXPR_ARG (arg0, 0);
8466 if (tree_expr_nonnegative_p (arg))
8468 const REAL_VALUE_TYPE dconstroot
8469 = real_value_truncate (TYPE_MODE (type),
8470 *get_real_const (rv_third));
8471 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8472 build_real (type, dconstroot));
8473 return build_call_expr (fndecl, 2, arg, narg1);
8477 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8478 if (fcode == BUILT_IN_POW
8479 || fcode == BUILT_IN_POWF
8480 || fcode == BUILT_IN_POWL)
8482 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8483 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8484 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8485 return build_call_expr (fndecl, 2, arg00, narg1);
8489 return NULL_TREE;
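/* A few examples of the pow folds above:
     pow (1.0, y)  -> 1.0          pow (x, 0.0)   -> 1.0
     pow (x, 1.0)  -> x            pow (x, -1.0)  -> 1.0 / x
     pow (x, 0.5)  -> sqrt (x)                 (unsafe math)
     pow (pow (x, y), z)  -> pow (x, y * z)    (unsafe math)
   and a constant base with an integral constant exponent is evaluated
   at compile time via real_powi.  */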
8492 /* Fold a builtin function call to powi, powif, or powil with arguments ARG0 and ARG1.
8493 Return NULL_TREE if no simplification can be made. */
8494 static tree
8495 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8496 tree arg0, tree arg1, tree type)
8498 if (!validate_arg (arg0, REAL_TYPE)
8499 || !validate_arg (arg1, INTEGER_TYPE))
8500 return NULL_TREE;
8502 /* Optimize pow(1.0,y) = 1.0. */
8503 if (real_onep (arg0))
8504 return omit_one_operand (type, build_real (type, dconst1), arg1);
8506 if (host_integerp (arg1, 0))
8508 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8510 /* Evaluate powi at compile-time. */
8511 if (TREE_CODE (arg0) == REAL_CST
8512 && !TREE_OVERFLOW (arg0))
8514 REAL_VALUE_TYPE x;
8515 x = TREE_REAL_CST (arg0);
8516 real_powi (&x, TYPE_MODE (type), &x, c);
8517 return build_real (type, x);
8520 /* Optimize pow(x,0) = 1.0. */
8521 if (c == 0)
8522 return omit_one_operand (type, build_real (type, dconst1),
8523 arg0);
8525 /* Optimize pow(x,1) = x. */
8526 if (c == 1)
8527 return arg0;
8529 /* Optimize pow(x,-1) = 1.0/x. */
8530 if (c == -1)
8531 return fold_build2 (RDIV_EXPR, type,
8532 build_real (type, dconst1), arg0);
8535 return NULL_TREE;
8538 /* A subroutine of fold_builtin to fold the various exponent
8539 functions. Return NULL_TREE if no simplification can be made.
8540 FUNC is the corresponding MPFR exponent function. */
8542 static tree
8543 fold_builtin_exponent (tree fndecl, tree arg,
8544 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8546 if (validate_arg (arg, REAL_TYPE))
8548 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8549 tree res;
8551 /* Calculate the result when the argument is a constant. */
8552 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8553 return res;
8555 /* Optimize expN(logN(x)) = x. */
8556 if (flag_unsafe_math_optimizations)
8558 const enum built_in_function fcode = builtin_mathfn_code (arg);
8560 if ((func == mpfr_exp
8561 && (fcode == BUILT_IN_LOG
8562 || fcode == BUILT_IN_LOGF
8563 || fcode == BUILT_IN_LOGL))
8564 || (func == mpfr_exp2
8565 && (fcode == BUILT_IN_LOG2
8566 || fcode == BUILT_IN_LOG2F
8567 || fcode == BUILT_IN_LOG2L))
8568 || (func == mpfr_exp10
8569 && (fcode == BUILT_IN_LOG10
8570 || fcode == BUILT_IN_LOG10F
8571 || fcode == BUILT_IN_LOG10L)))
8572 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8576 return NULL_TREE;
8579 /* Return true if VAR is a VAR_DECL or a component thereof. */
8581 static bool
8582 var_decl_component_p (tree var)
8584 tree inner = var;
8585 while (handled_component_p (inner))
8586 inner = TREE_OPERAND (inner, 0);
8587 return SSA_VAR_P (inner);
8590 /* Fold function call to builtin memset. Return
8591 NULL_TREE if no simplification can be made. */
8593 static tree
8594 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8596 tree var, ret;
8597 unsigned HOST_WIDE_INT length, cval;
8599 if (! validate_arg (dest, POINTER_TYPE)
8600 || ! validate_arg (c, INTEGER_TYPE)
8601 || ! validate_arg (len, INTEGER_TYPE))
8602 return NULL_TREE;
8604 if (! host_integerp (len, 1))
8605 return NULL_TREE;
8607 /* If the LEN parameter is zero, return DEST. */
8608 if (integer_zerop (len))
8609 return omit_one_operand (type, dest, c);
8611 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8612 return NULL_TREE;
8614 var = dest;
8615 STRIP_NOPS (var);
8616 if (TREE_CODE (var) != ADDR_EXPR)
8617 return NULL_TREE;
8619 var = TREE_OPERAND (var, 0);
8620 if (TREE_THIS_VOLATILE (var))
8621 return NULL_TREE;
8623 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8624 && !POINTER_TYPE_P (TREE_TYPE (var)))
8625 return NULL_TREE;
8627 if (! var_decl_component_p (var))
8628 return NULL_TREE;
8630 length = tree_low_cst (len, 1);
8631 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8632 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8633 < (int) length)
8634 return NULL_TREE;
8636 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8637 return NULL_TREE;
8639 if (integer_zerop (c))
8640 cval = 0;
8641 else
8643 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8644 return NULL_TREE;
8646 cval = tree_low_cst (c, 1);
8647 cval &= 0xff;
8648 cval |= cval << 8;
8649 cval |= cval << 16;
8650 cval |= (cval << 31) << 1;
8653 ret = build_int_cst_type (TREE_TYPE (var), cval);
8654 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8655 if (ignore)
8656 return ret;
8658 return omit_one_operand (type, dest, ret);
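/* For illustration (assuming an int variable i and sufficient
   alignment), the fold above turns
     memset (&i, 0, sizeof (int))     into   i = 0
     memset (&i, 0xab, sizeof (int))  into   i = 0xabababab
   i.e. the byte value is replicated across the whole stored word.  */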
8661 /* Fold function call to builtin bzero. Return
8662 NULL_TREE if no simplification can be made. */
8664 static tree
8665 fold_builtin_bzero (tree dest, tree size, bool ignore)
8667 if (! validate_arg (dest, POINTER_TYPE)
8668 || ! validate_arg (size, INTEGER_TYPE))
8669 return NULL_TREE;
8671 if (!ignore)
8672 return NULL_TREE;
8674 /* New argument list transforming bzero(ptr x, int y) to
8675 memset(ptr x, int 0, size_t y). This is done this way
8676 so that if it isn't expanded inline, we fall back to
8677 calling bzero instead of memset. */
8679 return fold_builtin_memset (dest, integer_zero_node,
8680 fold_convert (sizetype, size),
8681 void_type_node, ignore);
8684 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8685 NULL_TREE if no simplification can be made.
8686 If ENDP is 0, return DEST (like memcpy).
8687 If ENDP is 1, return DEST+LEN (like mempcpy).
8688 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8689 If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
8690 (memmove). */
8692 static tree
8693 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8695 tree destvar, srcvar, expr;
8697 if (! validate_arg (dest, POINTER_TYPE)
8698 || ! validate_arg (src, POINTER_TYPE)
8699 || ! validate_arg (len, INTEGER_TYPE))
8700 return NULL_TREE;
8702 /* If the LEN parameter is zero, return DEST. */
8703 if (integer_zerop (len))
8704 return omit_one_operand (type, dest, src);
8706 /* If SRC and DEST are the same (and not volatile), return
8707 DEST{,+LEN,+LEN-1}. */
8708 if (operand_equal_p (src, dest, 0))
8709 expr = len;
8710 else
8712 tree srctype, desttype;
8713 if (endp == 3)
8715 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8716 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8718 /* Both DEST and SRC must be pointer types.
8719 ??? This is what old code did. Is the testing for pointer types
8720 really mandatory?
8722 If either SRC is readonly or length is 1, we can use memcpy. */
8723 if (dest_align && src_align
8724 && (readonly_data_expr (src)
8725 || (host_integerp (len, 1)
8726 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8727 tree_low_cst (len, 1)))))
8729 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8730 if (!fn)
8731 return NULL_TREE;
8732 return build_call_expr (fn, 3, dest, src, len);
8734 return NULL_TREE;
8737 if (!host_integerp (len, 0))
8738 return NULL_TREE;
8739 /* FIXME:
8740 This logic loses for arguments like (type *)malloc (sizeof (type)),
8741 since we strip the casts up to the VOID return value of malloc.
8742 Perhaps we ought to inherit the type from the non-VOID argument here? */
8743 STRIP_NOPS (src);
8744 STRIP_NOPS (dest);
8745 srctype = TREE_TYPE (TREE_TYPE (src));
8746 desttype = TREE_TYPE (TREE_TYPE (dest));
8747 if (!srctype || !desttype
8748 || !TYPE_SIZE_UNIT (srctype)
8749 || !TYPE_SIZE_UNIT (desttype)
8750 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8751 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8752 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8753 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8754 return NULL_TREE;
8756 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8757 < (int) TYPE_ALIGN (desttype)
8758 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8759 < (int) TYPE_ALIGN (srctype)))
8760 return NULL_TREE;
8762 if (!ignore)
8763 dest = builtin_save_expr (dest);
8765 srcvar = build_fold_indirect_ref (src);
8766 if (TREE_THIS_VOLATILE (srcvar))
8767 return NULL_TREE;
8768 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8769 return NULL_TREE;
8770 /* With memcpy, it is possible to bypass aliasing rules, so without
8771 this check e.g. execute/20060930-2.c would be misoptimized, because
8772 it uses a conflicting alias set to hold the argument for the memcpy call.
8773 This check is probably unnecessary with -fno-strict-aliasing.
8774 Similarly for destvar. See also PR29286. */
8775 if (!var_decl_component_p (srcvar)
8776 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8777 to char_var='t'; */
8778 || is_gimple_min_invariant (srcvar)
8779 || readonly_data_expr (src))
8780 return NULL_TREE;
8782 destvar = build_fold_indirect_ref (dest);
8783 if (TREE_THIS_VOLATILE (destvar))
8784 return NULL_TREE;
8785 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8786 return NULL_TREE;
8787 if (!var_decl_component_p (destvar))
8788 return NULL_TREE;
8790 if (srctype == desttype
8791 || (gimple_in_ssa_p (cfun)
8792 && useless_type_conversion_p (desttype, srctype)))
8793 expr = srcvar;
8794 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8795 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8796 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8797 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8798 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8799 else
8800 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8801 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8804 if (ignore)
8805 return expr;
8807 if (endp == 0 || endp == 3)
8808 return omit_one_operand (type, dest, expr);
8810 if (expr == len)
8811 expr = NULL_TREE;
8813 if (endp == 2)
8814 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8815 ssize_int (1));
8817 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8818 dest = fold_convert (type, dest);
8819 if (expr)
8820 dest = omit_one_operand (type, dest, expr);
8821 return dest;
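/* A rough sketch of the memcpy/memmove folds above: with d and s
   being non-volatile variables of the same type,
     memcpy (&d, &s, sizeof (d)) -> d = s  (result &d; &d + len for mempcpy)
   a zero length folds to DEST, and a memmove whose source is read-only
   data (or whose small length fits the known alignment) is rewritten
   as a call to memcpy.  */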
8824 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8825 If LEN is not NULL, it represents the length of the string to be
8826 copied. Return NULL_TREE if no simplification can be made. */
8828 tree
8829 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8831 tree fn;
8833 if (!validate_arg (dest, POINTER_TYPE)
8834 || !validate_arg (src, POINTER_TYPE))
8835 return NULL_TREE;
8837 /* If SRC and DEST are the same (and not volatile), return DEST. */
8838 if (operand_equal_p (src, dest, 0))
8839 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8841 if (optimize_size)
8842 return NULL_TREE;
8844 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8845 if (!fn)
8846 return NULL_TREE;
8848 if (!len)
8850 len = c_strlen (src, 1);
8851 if (! len || TREE_SIDE_EFFECTS (len))
8852 return NULL_TREE;
8855 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8856 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8857 build_call_expr (fn, 3, dest, src, len));
8860 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8861 If SLEN is not NULL, it represents the length of the source string.
8862 Return NULL_TREE if no simplification can be made. */
8864 tree
8865 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8867 tree fn;
8869 if (!validate_arg (dest, POINTER_TYPE)
8870 || !validate_arg (src, POINTER_TYPE)
8871 || !validate_arg (len, INTEGER_TYPE))
8872 return NULL_TREE;
8874 /* If the LEN parameter is zero, return DEST. */
8875 if (integer_zerop (len))
8876 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8878 /* We can't compare slen with len as constants below if len is not a
8879 constant. */
8880 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8881 return NULL_TREE;
8883 if (!slen)
8884 slen = c_strlen (src, 1);
8886 /* Now, we must be passed a constant src ptr parameter. */
8887 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8888 return NULL_TREE;
8890 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8892 /* We do not support simplification of this case, though we do
8893 support it when expanding trees into RTL. */
8894 /* FIXME: generate a call to __builtin_memset. */
8895 if (tree_int_cst_lt (slen, len))
8896 return NULL_TREE;
8898 /* OK, transform into builtin memcpy. */
8899 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8900 if (!fn)
8901 return NULL_TREE;
8902 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8903 build_call_expr (fn, 3, dest, src, len));
8906 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8907 arguments to the call, and TYPE is its return type.
8908 Return NULL_TREE if no simplification can be made. */
8910 static tree
8911 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8913 if (!validate_arg (arg1, POINTER_TYPE)
8914 || !validate_arg (arg2, INTEGER_TYPE)
8915 || !validate_arg (len, INTEGER_TYPE))
8916 return NULL_TREE;
8917 else
8919 const char *p1;
8921 if (TREE_CODE (arg2) != INTEGER_CST
8922 || !host_integerp (len, 1))
8923 return NULL_TREE;
8925 p1 = c_getstr (arg1);
8926 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8928 char c;
8929 const char *r;
8930 tree tem;
8932 if (target_char_cast (arg2, &c))
8933 return NULL_TREE;
8935 r = memchr (p1, c, tree_low_cst (len, 1));
8937 if (r == NULL)
8938 return build_int_cst (TREE_TYPE (arg1), 0);
8940 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8941 size_int (r - p1));
8942 return fold_convert (type, tem);
8944 return NULL_TREE;
8948 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8949 Return NULL_TREE if no simplification can be made. */
8951 static tree
8952 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8954 const char *p1, *p2;
8956 if (!validate_arg (arg1, POINTER_TYPE)
8957 || !validate_arg (arg2, POINTER_TYPE)
8958 || !validate_arg (len, INTEGER_TYPE))
8959 return NULL_TREE;
8961 /* If the LEN parameter is zero, return zero. */
8962 if (integer_zerop (len))
8963 return omit_two_operands (integer_type_node, integer_zero_node,
8964 arg1, arg2);
8966 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8967 if (operand_equal_p (arg1, arg2, 0))
8968 return omit_one_operand (integer_type_node, integer_zero_node, len);
8970 p1 = c_getstr (arg1);
8971 p2 = c_getstr (arg2);
8973 /* If all arguments are constant, and the value of len is not greater
8974 than the lengths of arg1 and arg2, evaluate at compile-time. */
8975 if (host_integerp (len, 1) && p1 && p2
8976 && compare_tree_int (len, strlen (p1) + 1) <= 0
8977 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8979 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8981 if (r > 0)
8982 return integer_one_node;
8983 else if (r < 0)
8984 return integer_minus_one_node;
8985 else
8986 return integer_zero_node;
8989 /* If the len parameter is one, return an expression corresponding to
8990 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8991 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8993 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8994 tree cst_uchar_ptr_node
8995 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8997 tree ind1 = fold_convert (integer_type_node,
8998 build1 (INDIRECT_REF, cst_uchar_node,
8999 fold_convert (cst_uchar_ptr_node,
9000 arg1)));
9001 tree ind2 = fold_convert (integer_type_node,
9002 build1 (INDIRECT_REF, cst_uchar_node,
9003 fold_convert (cst_uchar_ptr_node,
9004 arg2)));
9005 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9008 return NULL_TREE;
9011 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9012 Return NULL_TREE if no simplification can be made. */
9014 static tree
9015 fold_builtin_strcmp (tree arg1, tree arg2)
9017 const char *p1, *p2;
9019 if (!validate_arg (arg1, POINTER_TYPE)
9020 || !validate_arg (arg2, POINTER_TYPE))
9021 return NULL_TREE;
9023 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9024 if (operand_equal_p (arg1, arg2, 0))
9025 return integer_zero_node;
9027 p1 = c_getstr (arg1);
9028 p2 = c_getstr (arg2);
9030 if (p1 && p2)
9032 const int i = strcmp (p1, p2);
9033 if (i < 0)
9034 return integer_minus_one_node;
9035 else if (i > 0)
9036 return integer_one_node;
9037 else
9038 return integer_zero_node;
9041 /* If the second arg is "", return *(const unsigned char*)arg1. */
9042 if (p2 && *p2 == '\0')
9044 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9045 tree cst_uchar_ptr_node
9046 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9048 return fold_convert (integer_type_node,
9049 build1 (INDIRECT_REF, cst_uchar_node,
9050 fold_convert (cst_uchar_ptr_node,
9051 arg1)));
9054 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9055 if (p1 && *p1 == '\0')
9057 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9058 tree cst_uchar_ptr_node
9059 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9061 tree temp = fold_convert (integer_type_node,
9062 build1 (INDIRECT_REF, cst_uchar_node,
9063 fold_convert (cst_uchar_ptr_node,
9064 arg2)));
9065 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9068 return NULL_TREE;
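/* Illustrative strcmp folds from above:
     strcmp (s, s)      -> 0
     strcmp ("a", "b")  -> -1   (both strings constant)
     strcmp (s, "")     -> *(const unsigned char *) s
     strcmp ("", s)     -> -*(const unsigned char *) s  */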
9071 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9072 Return NULL_TREE if no simplification can be made. */
9074 static tree
9075 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9077 const char *p1, *p2;
9079 if (!validate_arg (arg1, POINTER_TYPE)
9080 || !validate_arg (arg2, POINTER_TYPE)
9081 || !validate_arg (len, INTEGER_TYPE))
9082 return NULL_TREE;
9084 /* If the LEN parameter is zero, return zero. */
9085 if (integer_zerop (len))
9086 return omit_two_operands (integer_type_node, integer_zero_node,
9087 arg1, arg2);
9089 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9090 if (operand_equal_p (arg1, arg2, 0))
9091 return omit_one_operand (integer_type_node, integer_zero_node, len);
9093 p1 = c_getstr (arg1);
9094 p2 = c_getstr (arg2);
9096 if (host_integerp (len, 1) && p1 && p2)
9098 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9099 if (i > 0)
9100 return integer_one_node;
9101 else if (i < 0)
9102 return integer_minus_one_node;
9103 else
9104 return integer_zero_node;
9107 /* If the second arg is "", and the length is greater than zero,
9108 return *(const unsigned char*)arg1. */
9109 if (p2 && *p2 == '\0'
9110 && TREE_CODE (len) == INTEGER_CST
9111 && tree_int_cst_sgn (len) == 1)
9113 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9114 tree cst_uchar_ptr_node
9115 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9117 return fold_convert (integer_type_node,
9118 build1 (INDIRECT_REF, cst_uchar_node,
9119 fold_convert (cst_uchar_ptr_node,
9120 arg1)));
9123 /* If the first arg is "", and the length is greater than zero,
9124 return -*(const unsigned char*)arg2. */
9125 if (p1 && *p1 == '\0'
9126 && TREE_CODE (len) == INTEGER_CST
9127 && tree_int_cst_sgn (len) == 1)
9129 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9130 tree cst_uchar_ptr_node
9131 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9133 tree temp = fold_convert (integer_type_node,
9134 build1 (INDIRECT_REF, cst_uchar_node,
9135 fold_convert (cst_uchar_ptr_node,
9136 arg2)));
9137 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9140 /* If the len parameter is one, return an expression corresponding to
9141 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9142 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9144 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9145 tree cst_uchar_ptr_node
9146 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9148 tree ind1 = fold_convert (integer_type_node,
9149 build1 (INDIRECT_REF, cst_uchar_node,
9150 fold_convert (cst_uchar_ptr_node,
9151 arg1)));
9152 tree ind2 = fold_convert (integer_type_node,
9153 build1 (INDIRECT_REF, cst_uchar_node,
9154 fold_convert (cst_uchar_ptr_node,
9155 arg2)));
9156 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9159 return NULL_TREE;
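/* For illustration, the folds above applied at a call site (assuming the
   string arguments are literals visible to the folder):

     strncmp (s, s, n)            -> 0   (N still evaluated for side effects)
     strncmp (a, b, 0)            -> 0   (A and B still evaluated)
     strncmp ("hello", "help", 3) -> 0   (computed with the host strncmp)
     strncmp (s, "", 4)           -> *(const unsigned char *) s
     strncmp (a, b, 1)            -> *(const unsigned char *) a
                                     - *(const unsigned char *) b  */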
9162 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9163 ARG. Return NULL_TREE if no simplification can be made. */
9165 static tree
9166 fold_builtin_signbit (tree arg, tree type)
9168 tree temp;
9170 if (!validate_arg (arg, REAL_TYPE))
9171 return NULL_TREE;
9173 /* If ARG is a compile-time constant, determine the result. */
9174 if (TREE_CODE (arg) == REAL_CST
9175 && !TREE_OVERFLOW (arg))
9177 REAL_VALUE_TYPE c;
9179 c = TREE_REAL_CST (arg);
9180 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9181 return fold_convert (type, temp);
9184 /* If ARG is non-negative, the result is always zero. */
9185 if (tree_expr_nonnegative_p (arg))
9186 return omit_one_operand (type, integer_zero_node, arg);
9188 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9189 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9190 return fold_build2 (LT_EXPR, type, arg,
9191 build_real (TREE_TYPE (arg), dconst0));
9193 return NULL_TREE;
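/* For illustration (assuming a binary IEEE floating-point format):

     signbit (-1.5)      -> 1
     signbit (2.5)       -> 0
     signbit (fabs (x))  -> 0, with X still evaluated
   and, for formats without signed zeros, signbit (x) -> x < 0.0.  */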
9196 /* Fold function call to builtin copysign, copysignf or copysignl with
9197 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9198 be made. */
9200 static tree
9201 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9203 tree tem;
9205 if (!validate_arg (arg1, REAL_TYPE)
9206 || !validate_arg (arg2, REAL_TYPE))
9207 return NULL_TREE;
9209 /* copysign(X,X) is X. */
9210 if (operand_equal_p (arg1, arg2, 0))
9211 return fold_convert (type, arg1);
9213 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9214 if (TREE_CODE (arg1) == REAL_CST
9215 && TREE_CODE (arg2) == REAL_CST
9216 && !TREE_OVERFLOW (arg1)
9217 && !TREE_OVERFLOW (arg2))
9219 REAL_VALUE_TYPE c1, c2;
9221 c1 = TREE_REAL_CST (arg1);
9222 c2 = TREE_REAL_CST (arg2);
9223 /* c1.sign := c2.sign. */
9224 real_copysign (&c1, &c2);
9225 return build_real (type, c1);
9228 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9229 Remember to evaluate Y for side-effects. */
9230 if (tree_expr_nonnegative_p (arg2))
9231 return omit_one_operand (type,
9232 fold_build1 (ABS_EXPR, type, arg1),
9233 arg2);
9235 /* Strip sign changing operations for the first argument. */
9236 tem = fold_strip_sign_ops (arg1);
9237 if (tem)
9238 return build_call_expr (fndecl, 2, tem, arg2);
9240 return NULL_TREE;
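/* For illustration:

     copysign (x, x)      -> x
     copysign (-3.0, 2.0) -> 3.0        (computed via real_copysign)
     copysign (x, 2.0)    -> fabs (x)   (second argument known non-negative)
     copysign (-x, y)     -> copysign (x, y)  (sign-changing op stripped)  */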
9243 /* Fold a call to builtin isascii with argument ARG. */
9245 static tree
9246 fold_builtin_isascii (tree arg)
9248 if (!validate_arg (arg, INTEGER_TYPE))
9249 return NULL_TREE;
9250 else
9252 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9253 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9254 build_int_cst (NULL_TREE,
9255 ~ (unsigned HOST_WIDE_INT) 0x7f));
9256 return fold_build2 (EQ_EXPR, integer_type_node,
9257 arg, integer_zero_node);
9261 /* Fold a call to builtin toascii with argument ARG. */
9263 static tree
9264 fold_builtin_toascii (tree arg)
9266 if (!validate_arg (arg, INTEGER_TYPE))
9267 return NULL_TREE;
9269 /* Transform toascii(c) -> (c & 0x7f). */
9270 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9271 build_int_cst (NULL_TREE, 0x7f));
9274 /* Fold a call to builtin isdigit with argument ARG. */
9276 static tree
9277 fold_builtin_isdigit (tree arg)
9279 if (!validate_arg (arg, INTEGER_TYPE))
9280 return NULL_TREE;
9281 else
9283 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9284 /* According to the C standard, isdigit is unaffected by locale.
9285 However, it definitely is affected by the target character set. */
9286 unsigned HOST_WIDE_INT target_digit0
9287 = lang_hooks.to_target_charset ('0');
9289 if (target_digit0 == 0)
9290 return NULL_TREE;
9292 arg = fold_convert (unsigned_type_node, arg);
9293 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9294 build_int_cst (unsigned_type_node, target_digit0));
9295 return fold_build2 (LE_EXPR, integer_type_node, arg,
9296 build_int_cst (unsigned_type_node, 9));
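/* For illustration, on a target whose execution character set is ASCII
   (so '0' maps to 48) the ctype folds above produce:

     isascii (c) -> ((c & ~0x7f) == 0)
     toascii (c) -> (c & 0x7f)
     isdigit (c) -> ((unsigned) c - 48 <= 9)  */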
9300 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9302 static tree
9303 fold_builtin_fabs (tree arg, tree type)
9305 if (!validate_arg (arg, REAL_TYPE))
9306 return NULL_TREE;
9308 arg = fold_convert (type, arg);
9309 if (TREE_CODE (arg) == REAL_CST)
9310 return fold_abs_const (arg, type);
9311 return fold_build1 (ABS_EXPR, type, arg);
9314 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9316 static tree
9317 fold_builtin_abs (tree arg, tree type)
9319 if (!validate_arg (arg, INTEGER_TYPE))
9320 return NULL_TREE;
9322 arg = fold_convert (type, arg);
9323 if (TREE_CODE (arg) == INTEGER_CST)
9324 return fold_abs_const (arg, type);
9325 return fold_build1 (ABS_EXPR, type, arg);
9328 /* Fold a call to builtin fmin or fmax. */
9330 static tree
9331 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9333 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9335 /* Calculate the result when the argument is a constant. */
9336 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9338 if (res)
9339 return res;
9341 /* If either argument is NaN, return the other one. Avoid the
9342 transformation if we get (and honor) a signalling NaN. Using
9343 omit_one_operand() ensures we create a non-lvalue. */
9344 if (TREE_CODE (arg0) == REAL_CST
9345 && real_isnan (&TREE_REAL_CST (arg0))
9346 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9347 || ! TREE_REAL_CST (arg0).signalling))
9348 return omit_one_operand (type, arg1, arg0);
9349 if (TREE_CODE (arg1) == REAL_CST
9350 && real_isnan (&TREE_REAL_CST (arg1))
9351 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9352 || ! TREE_REAL_CST (arg1).signalling))
9353 return omit_one_operand (type, arg0, arg1);
9355 /* Transform fmin/fmax(x,x) -> x. */
9356 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9357 return omit_one_operand (type, arg0, arg1);
9359 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9360 functions to return the numeric arg if the other one is NaN.
9361 These tree codes don't honor that, so only transform if
9362 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9363 handled, so we don't have to worry about it either. */
9364 if (flag_finite_math_only)
9365 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9366 fold_convert (type, arg0),
9367 fold_convert (type, arg1));
9369 return NULL_TREE;
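/* For illustration:

     fmax (x, x)                  -> x
     fmin (x, __builtin_nan ("")) -> x   (the quiet-NaN constant is dropped)

   and with -ffinite-math-only, fmin/fmax (a, b) -> MIN_EXPR/MAX_EXPR (a, b).  */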
9372 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9374 static tree
9375 fold_builtin_carg (tree arg, tree type)
9377 if (validate_arg (arg, COMPLEX_TYPE))
9379 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9381 if (atan2_fn)
9383 tree new_arg = builtin_save_expr (arg);
9384 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9385 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9386 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9390 return NULL_TREE;
9393 /* Fold a call to builtin logb/ilogb. */
9395 static tree
9396 fold_builtin_logb (tree arg, tree rettype)
9398 if (! validate_arg (arg, REAL_TYPE))
9399 return NULL_TREE;
9401 STRIP_NOPS (arg);
9403 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9405 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9407 switch (value->cl)
9409 case rvc_nan:
9410 case rvc_inf:
9411 /* If arg is Inf or NaN and we're logb, return it. */
9412 if (TREE_CODE (rettype) == REAL_TYPE)
9413 return fold_convert (rettype, arg);
9414 /* Fall through... */
9415 case rvc_zero:
9416 /* For logb, zero may set errno and/or raise an exception; for
9417 ilogb we don't know FP_ILOGB0. */
9418 return NULL_TREE;
9419 case rvc_normal:
9420 /* For normal numbers, proceed iff radix == 2. In GCC,
9421 normalized significands are in the range [0.5, 1.0). We
9422 want the exponent as if they were [1.0, 2.0) so get the
9423 exponent and subtract 1. */
9424 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9425 return fold_convert (rettype, build_int_cst (NULL_TREE,
9426 REAL_EXP (value)-1));
9427 break;
9431 return NULL_TREE;
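/* For illustration, with a radix-2 format: GCC represents 8.0 with a
   significand of 0.5 and REAL_EXP == 4, so logb (8.0) folds to 4 - 1 = 3.
   A zero argument is never folded here, and Inf/NaN are folded only for
   logb, where the argument is returned unchanged.  */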
9434 /* Fold a call to builtin significand, if radix == 2. */
9436 static tree
9437 fold_builtin_significand (tree arg, tree rettype)
9439 if (! validate_arg (arg, REAL_TYPE))
9440 return NULL_TREE;
9442 STRIP_NOPS (arg);
9444 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9446 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9448 switch (value->cl)
9450 case rvc_zero:
9451 case rvc_nan:
9452 case rvc_inf:
9453 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9454 return fold_convert (rettype, arg);
9455 case rvc_normal:
9456 /* For normal numbers, proceed iff radix == 2. */
9457 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9459 REAL_VALUE_TYPE result = *value;
9460 /* In GCC, normalized significands are in the range [0.5,
9461 1.0). We want them to be [1.0, 2.0) so set the
9462 exponent to 1. */
9463 SET_REAL_EXP (&result, 1);
9464 return build_real (rettype, result);
9466 break;
9470 return NULL_TREE;
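/* For illustration, with a radix-2 format: 12.0 is 0.75 * 2**4 internally,
   so forcing the exponent to 1 yields significand (12.0) -> 1.5.  */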
9473 /* Fold a call to builtin frexp; we can assume the base is 2. */
9475 static tree
9476 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9478 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9479 return NULL_TREE;
9481 STRIP_NOPS (arg0);
9483 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9484 return NULL_TREE;
9486 arg1 = build_fold_indirect_ref (arg1);
9488 /* Proceed if a valid pointer type was passed in. */
9489 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9491 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9492 tree frac, exp;
9494 switch (value->cl)
9496 case rvc_zero:
9497 /* For +-0, return (*exp = 0, +-0). */
9498 exp = integer_zero_node;
9499 frac = arg0;
9500 break;
9501 case rvc_nan:
9502 case rvc_inf:
9503 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9504 return omit_one_operand (rettype, arg0, arg1);
9505 case rvc_normal:
9507 /* Since the frexp function always expects base 2, and in
9508 GCC normalized significands are already in the range
9509 [0.5, 1.0), we have exactly what frexp wants. */
9510 REAL_VALUE_TYPE frac_rvt = *value;
9511 SET_REAL_EXP (&frac_rvt, 0);
9512 frac = build_real (rettype, frac_rvt);
9513 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9515 break;
9516 default:
9517 gcc_unreachable ();
9520 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9521 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9522 TREE_SIDE_EFFECTS (arg1) = 1;
9523 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9526 return NULL_TREE;
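/* For illustration: 8.0 is 0.5 * 2**4 internally, so
     frexp (8.0, &e) -> (*e = 4, 0.5)
   while for a NaN or Inf argument the number itself is returned and
   *e is not written.  */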
9529 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9530 then we can assume the base is two. If it's false, then we have to
9531 check the mode of the TYPE parameter in certain cases. */
9533 static tree
9534 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9536 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9538 STRIP_NOPS (arg0);
9539 STRIP_NOPS (arg1);
9541 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9542 if (real_zerop (arg0) || integer_zerop (arg1)
9543 || (TREE_CODE (arg0) == REAL_CST
9544 && !real_isfinite (&TREE_REAL_CST (arg0))))
9545 return omit_one_operand (type, arg0, arg1);
9547 /* If both arguments are constant, then try to evaluate it. */
9548 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9549 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9550 && host_integerp (arg1, 0))
9552 /* Bound the maximum adjustment to twice the range of the
9553 mode's valid exponents. Use abs to ensure the range is
9554 positive as a sanity check. */
9555 const long max_exp_adj = 2 *
9556 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9557 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9559 /* Get the user-requested adjustment. */
9560 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9562 /* The requested adjustment must be inside this range. This
9563 is a preliminary cap to avoid things like overflow; we
9564 may still fail to compute the result for other reasons. */
9565 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9567 REAL_VALUE_TYPE initial_result;
9569 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9571 /* Ensure we didn't overflow. */
9572 if (! real_isinf (&initial_result))
9574 const REAL_VALUE_TYPE trunc_result
9575 = real_value_truncate (TYPE_MODE (type), initial_result);
9577 /* Only proceed if the target mode can hold the
9578 resulting value. */
9579 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9580 return build_real (type, trunc_result);
9586 return NULL_TREE;
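/* For illustration:

     ldexp (x, 0)     -> x
     ldexp (0.0, n)   -> 0.0   (N still evaluated)
     ldexp (0.75, 4)  -> 12.0  (computed via real_ldexp)

   For IEEE double (emin == -1021, emax == 1024) the cap works out to
   2 * (1024 - -1021) = 4090, so only adjustments strictly between
   -4090 and 4090 are even attempted.  */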
9589 /* Fold a call to builtin modf. */
9591 static tree
9592 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9594 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9595 return NULL_TREE;
9597 STRIP_NOPS (arg0);
9599 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9600 return NULL_TREE;
9602 arg1 = build_fold_indirect_ref (arg1);
9604 /* Proceed if a valid pointer type was passed in. */
9605 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9607 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9608 REAL_VALUE_TYPE trunc, frac;
9610 switch (value->cl)
9612 case rvc_nan:
9613 case rvc_zero:
9614 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9615 trunc = frac = *value;
9616 break;
9617 case rvc_inf:
9618 /* For +-Inf, return (*arg1 = arg0, +-0). */
9619 frac = dconst0;
9620 frac.sign = value->sign;
9621 trunc = *value;
9622 break;
9623 case rvc_normal:
9624 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9625 real_trunc (&trunc, VOIDmode, value);
9626 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9627 /* If the original number was negative and already
9628 integral, then the fractional part is -0.0. */
9629 if (value->sign && frac.cl == rvc_zero)
9630 frac.sign = value->sign;
9631 break;
9634 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9635 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9636 build_real (rettype, trunc));
9637 TREE_SIDE_EFFECTS (arg1) = 1;
9638 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9639 build_real (rettype, frac));
9642 return NULL_TREE;
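/* For illustration:

     modf (-3.5, &ip) -> (*ip = -3.0, -0.5)
     modf (-2.0, &ip) -> (*ip = -2.0, -0.0)   (negative and already integral)
     modf (inf, &ip)  -> (*ip = inf, 0.0)  */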
9645 /* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
9646 ARG is the argument for the call. */
9648 static tree
9649 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9651 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9652 REAL_VALUE_TYPE r;
9654 if (!validate_arg (arg, REAL_TYPE))
9655 return NULL_TREE;
9657 switch (builtin_index)
9659 case BUILT_IN_ISINF:
9660 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9661 return omit_one_operand (type, integer_zero_node, arg);
9663 if (TREE_CODE (arg) == REAL_CST)
9665 r = TREE_REAL_CST (arg);
9666 if (real_isinf (&r))
9667 return real_compare (GT_EXPR, &r, &dconst0)
9668 ? integer_one_node : integer_minus_one_node;
9669 else
9670 return integer_zero_node;
9673 return NULL_TREE;
9675 case BUILT_IN_ISINF_SIGN:
9677 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9678 /* In a boolean context, GCC will fold the inner COND_EXPR to
9679 1. So e.g. "if (isinf_sign(x))" would be folded to just
9680 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9681 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9682 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9683 tree tmp = NULL_TREE;
9685 arg = builtin_save_expr (arg);
9687 if (signbit_fn && isinf_fn)
9689 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9690 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9692 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9693 signbit_call, integer_zero_node);
9694 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9695 isinf_call, integer_zero_node);
9697 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9698 integer_minus_one_node, integer_one_node);
9699 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9700 integer_zero_node);
9703 return tmp;
9706 case BUILT_IN_ISFINITE:
9707 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9708 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9709 return omit_one_operand (type, integer_one_node, arg);
9711 if (TREE_CODE (arg) == REAL_CST)
9713 r = TREE_REAL_CST (arg);
9714 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9717 return NULL_TREE;
9719 case BUILT_IN_ISNAN:
9720 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9721 return omit_one_operand (type, integer_zero_node, arg);
9723 if (TREE_CODE (arg) == REAL_CST)
9725 r = TREE_REAL_CST (arg);
9726 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9729 arg = builtin_save_expr (arg);
9730 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9732 default:
9733 gcc_unreachable ();
9737 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9738 This builtin will generate code to return the appropriate floating
9739 point classification depending on the value of the floating point
9740 number passed in. The possible return values must be supplied as
9741 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9742 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9743 one floating point argument which is "type generic". */
9745 static tree
9746 fold_builtin_fpclassify (tree exp)
9748 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9749 arg, type, res, tmp;
9750 enum machine_mode mode;
9751 REAL_VALUE_TYPE r;
9752 char buf[128];
9754 /* Verify the required arguments in the original call. */
9755 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9756 INTEGER_TYPE, INTEGER_TYPE,
9757 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9758 return NULL_TREE;
9760 fp_nan = CALL_EXPR_ARG (exp, 0);
9761 fp_infinite = CALL_EXPR_ARG (exp, 1);
9762 fp_normal = CALL_EXPR_ARG (exp, 2);
9763 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9764 fp_zero = CALL_EXPR_ARG (exp, 4);
9765 arg = CALL_EXPR_ARG (exp, 5);
9766 type = TREE_TYPE (arg);
9767 mode = TYPE_MODE (type);
9768 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9770 /* fpclassify(x) ->
9771 isnan(x) ? FP_NAN :
9772 (fabs(x) == Inf ? FP_INFINITE :
9773 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9774 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9776 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9777 build_real (type, dconst0));
9778 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9780 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9781 real_from_string (&r, buf);
9782 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9783 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9785 if (HONOR_INFINITIES (mode))
9787 real_inf (&r);
9788 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9789 build_real (type, r));
9790 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9793 if (HONOR_NANS (mode))
9795 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9796 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9799 return res;
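/* For illustration, for an IEEE double argument the smallest normal value
   is 0x1p-1022 (DBL_MIN), so the expansion reads roughly:

     isnan (x) ? FP_NAN
       : fabs (x) == __builtin_inf () ? FP_INFINITE
       : fabs (x) >= 0x1p-1022 ? FP_NORMAL
       : x == 0.0 ? FP_ZERO : FP_SUBNORMAL  */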
9802 /* Fold a call to an unordered comparison function such as
9803 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9804 being called and ARG0 and ARG1 are the arguments for the call.
9805 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9806 the opposite of the desired result. UNORDERED_CODE is used
9807 for modes that can hold NaNs and ORDERED_CODE is used for
9808 the rest. */
9810 static tree
9811 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9812 enum tree_code unordered_code,
9813 enum tree_code ordered_code)
9815 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9816 enum tree_code code;
9817 tree type0, type1;
9818 enum tree_code code0, code1;
9819 tree cmp_type = NULL_TREE;
9821 type0 = TREE_TYPE (arg0);
9822 type1 = TREE_TYPE (arg1);
9824 code0 = TREE_CODE (type0);
9825 code1 = TREE_CODE (type1);
9827 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9828 /* Choose the wider of two real types. */
9829 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9830 ? type0 : type1;
9831 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9832 cmp_type = type0;
9833 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9834 cmp_type = type1;
9836 arg0 = fold_convert (cmp_type, arg0);
9837 arg1 = fold_convert (cmp_type, arg1);
9839 if (unordered_code == UNORDERED_EXPR)
9841 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9842 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9843 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9846 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9847 : ordered_code;
9848 return fold_build1 (TRUTH_NOT_EXPR, type,
9849 fold_build2 (code, type, arg0, arg1));
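/* For illustration: with NaNs honored, isgreater (x, y) becomes
   !(x UNLE y), i.e. true only when the operands are ordered and x > y;
   if the mode cannot hold NaNs it degenerates to !(x <= y), and
   isunordered (x, y) simply becomes 0 with both operands evaluated.  */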
9852 /* Fold a call to built-in function FNDECL with 0 arguments.
9853 IGNORE is true if the result of the function call is ignored. This
9854 function returns NULL_TREE if no simplification was possible. */
9856 static tree
9857 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9859 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9860 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9861 switch (fcode)
9863 CASE_FLT_FN (BUILT_IN_INF):
9864 case BUILT_IN_INFD32:
9865 case BUILT_IN_INFD64:
9866 case BUILT_IN_INFD128:
9867 return fold_builtin_inf (type, true);
9869 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9870 return fold_builtin_inf (type, false);
9872 case BUILT_IN_CLASSIFY_TYPE:
9873 return fold_builtin_classify_type (NULL_TREE);
9875 default:
9876 break;
9878 return NULL_TREE;
9881 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9882 IGNORE is true if the result of the function call is ignored. This
9883 function returns NULL_TREE if no simplification was possible. */
9885 static tree
9886 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9888 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9889 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9890 switch (fcode)
9893 case BUILT_IN_CONSTANT_P:
9895 tree val = fold_builtin_constant_p (arg0);
9897 /* Gimplification will pull the CALL_EXPR for the builtin out of
9898 an if condition. When not optimizing, we'll not CSE it back.
9899 To avoid link error types of regressions, return false now. */
9900 if (!val && !optimize)
9901 val = integer_zero_node;
9903 return val;
9906 case BUILT_IN_CLASSIFY_TYPE:
9907 return fold_builtin_classify_type (arg0);
9909 case BUILT_IN_STRLEN:
9910 return fold_builtin_strlen (arg0);
9912 CASE_FLT_FN (BUILT_IN_FABS):
9913 return fold_builtin_fabs (arg0, type);
9915 case BUILT_IN_ABS:
9916 case BUILT_IN_LABS:
9917 case BUILT_IN_LLABS:
9918 case BUILT_IN_IMAXABS:
9919 return fold_builtin_abs (arg0, type);
9921 CASE_FLT_FN (BUILT_IN_CONJ):
9922 if (validate_arg (arg0, COMPLEX_TYPE))
9923 return fold_build1 (CONJ_EXPR, type, arg0);
9924 break;
9926 CASE_FLT_FN (BUILT_IN_CREAL):
9927 if (validate_arg (arg0, COMPLEX_TYPE))
9928 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9929 break;
9931 CASE_FLT_FN (BUILT_IN_CIMAG):
9932 if (validate_arg (arg0, COMPLEX_TYPE))
9933 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9934 break;
9936 CASE_FLT_FN (BUILT_IN_CCOS):
9937 CASE_FLT_FN (BUILT_IN_CCOSH):
9938 /* These functions are "even", i.e. f(x) == f(-x). */
9939 if (validate_arg (arg0, COMPLEX_TYPE))
9941 tree narg = fold_strip_sign_ops (arg0);
9942 if (narg)
9943 return build_call_expr (fndecl, 1, narg);
9945 break;
9947 CASE_FLT_FN (BUILT_IN_CABS):
9948 return fold_builtin_cabs (arg0, type, fndecl);
9950 CASE_FLT_FN (BUILT_IN_CARG):
9951 return fold_builtin_carg (arg0, type);
9953 CASE_FLT_FN (BUILT_IN_SQRT):
9954 return fold_builtin_sqrt (arg0, type);
9956 CASE_FLT_FN (BUILT_IN_CBRT):
9957 return fold_builtin_cbrt (arg0, type);
9959 CASE_FLT_FN (BUILT_IN_ASIN):
9960 if (validate_arg (arg0, REAL_TYPE))
9961 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9962 &dconstm1, &dconst1, true);
9963 break;
9965 CASE_FLT_FN (BUILT_IN_ACOS):
9966 if (validate_arg (arg0, REAL_TYPE))
9967 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9968 &dconstm1, &dconst1, true);
9969 break;
9971 CASE_FLT_FN (BUILT_IN_ATAN):
9972 if (validate_arg (arg0, REAL_TYPE))
9973 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9974 break;
9976 CASE_FLT_FN (BUILT_IN_ASINH):
9977 if (validate_arg (arg0, REAL_TYPE))
9978 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9979 break;
9981 CASE_FLT_FN (BUILT_IN_ACOSH):
9982 if (validate_arg (arg0, REAL_TYPE))
9983 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9984 &dconst1, NULL, true);
9985 break;
9987 CASE_FLT_FN (BUILT_IN_ATANH):
9988 if (validate_arg (arg0, REAL_TYPE))
9989 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9990 &dconstm1, &dconst1, false);
9991 break;
9993 CASE_FLT_FN (BUILT_IN_SIN):
9994 if (validate_arg (arg0, REAL_TYPE))
9995 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9996 break;
9998 CASE_FLT_FN (BUILT_IN_COS):
9999 return fold_builtin_cos (arg0, type, fndecl);
10000 break;
10002 CASE_FLT_FN (BUILT_IN_TAN):
10003 return fold_builtin_tan (arg0, type);
10005 CASE_FLT_FN (BUILT_IN_CEXP):
10006 return fold_builtin_cexp (arg0, type);
10008 CASE_FLT_FN (BUILT_IN_CEXPI):
10009 if (validate_arg (arg0, REAL_TYPE))
10010 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10011 break;
10013 CASE_FLT_FN (BUILT_IN_SINH):
10014 if (validate_arg (arg0, REAL_TYPE))
10015 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10016 break;
10018 CASE_FLT_FN (BUILT_IN_COSH):
10019 return fold_builtin_cosh (arg0, type, fndecl);
10021 CASE_FLT_FN (BUILT_IN_TANH):
10022 if (validate_arg (arg0, REAL_TYPE))
10023 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10024 break;
10026 CASE_FLT_FN (BUILT_IN_ERF):
10027 if (validate_arg (arg0, REAL_TYPE))
10028 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10029 break;
10031 CASE_FLT_FN (BUILT_IN_ERFC):
10032 if (validate_arg (arg0, REAL_TYPE))
10033 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10034 break;
10036 CASE_FLT_FN (BUILT_IN_TGAMMA):
10037 if (validate_arg (arg0, REAL_TYPE))
10038 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10039 break;
10041 CASE_FLT_FN (BUILT_IN_EXP):
10042 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10044 CASE_FLT_FN (BUILT_IN_EXP2):
10045 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10047 CASE_FLT_FN (BUILT_IN_EXP10):
10048 CASE_FLT_FN (BUILT_IN_POW10):
10049 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10051 CASE_FLT_FN (BUILT_IN_EXPM1):
10052 if (validate_arg (arg0, REAL_TYPE))
10053 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10054 break;
10056 CASE_FLT_FN (BUILT_IN_LOG):
10057 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10059 CASE_FLT_FN (BUILT_IN_LOG2):
10060 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10062 CASE_FLT_FN (BUILT_IN_LOG10):
10063 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10065 CASE_FLT_FN (BUILT_IN_LOG1P):
10066 if (validate_arg (arg0, REAL_TYPE))
10067 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10068 &dconstm1, NULL, false);
10069 break;
10071 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10072 CASE_FLT_FN (BUILT_IN_J0):
10073 if (validate_arg (arg0, REAL_TYPE))
10074 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10075 NULL, NULL, 0);
10076 break;
10078 CASE_FLT_FN (BUILT_IN_J1):
10079 if (validate_arg (arg0, REAL_TYPE))
10080 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10081 NULL, NULL, 0);
10082 break;
10084 CASE_FLT_FN (BUILT_IN_Y0):
10085 if (validate_arg (arg0, REAL_TYPE))
10086 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10087 &dconst0, NULL, false);
10088 break;
10090 CASE_FLT_FN (BUILT_IN_Y1):
10091 if (validate_arg (arg0, REAL_TYPE))
10092 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10093 &dconst0, NULL, false);
10094 break;
10095 #endif
10097 CASE_FLT_FN (BUILT_IN_NAN):
10098 case BUILT_IN_NAND32:
10099 case BUILT_IN_NAND64:
10100 case BUILT_IN_NAND128:
10101 return fold_builtin_nan (arg0, type, true);
10103 CASE_FLT_FN (BUILT_IN_NANS):
10104 return fold_builtin_nan (arg0, type, false);
10106 CASE_FLT_FN (BUILT_IN_FLOOR):
10107 return fold_builtin_floor (fndecl, arg0);
10109 CASE_FLT_FN (BUILT_IN_CEIL):
10110 return fold_builtin_ceil (fndecl, arg0);
10112 CASE_FLT_FN (BUILT_IN_TRUNC):
10113 return fold_builtin_trunc (fndecl, arg0);
10115 CASE_FLT_FN (BUILT_IN_ROUND):
10116 return fold_builtin_round (fndecl, arg0);
10118 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10119 CASE_FLT_FN (BUILT_IN_RINT):
10120 return fold_trunc_transparent_mathfn (fndecl, arg0);
10122 CASE_FLT_FN (BUILT_IN_LCEIL):
10123 CASE_FLT_FN (BUILT_IN_LLCEIL):
10124 CASE_FLT_FN (BUILT_IN_LFLOOR):
10125 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10126 CASE_FLT_FN (BUILT_IN_LROUND):
10127 CASE_FLT_FN (BUILT_IN_LLROUND):
10128 return fold_builtin_int_roundingfn (fndecl, arg0);
10130 CASE_FLT_FN (BUILT_IN_LRINT):
10131 CASE_FLT_FN (BUILT_IN_LLRINT):
10132 return fold_fixed_mathfn (fndecl, arg0);
10134 case BUILT_IN_BSWAP32:
10135 case BUILT_IN_BSWAP64:
10136 return fold_builtin_bswap (fndecl, arg0);
10138 CASE_INT_FN (BUILT_IN_FFS):
10139 CASE_INT_FN (BUILT_IN_CLZ):
10140 CASE_INT_FN (BUILT_IN_CTZ):
10141 CASE_INT_FN (BUILT_IN_POPCOUNT):
10142 CASE_INT_FN (BUILT_IN_PARITY):
10143 return fold_builtin_bitop (fndecl, arg0);
10145 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10146 return fold_builtin_signbit (arg0, type);
10148 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10149 return fold_builtin_significand (arg0, type);
10151 CASE_FLT_FN (BUILT_IN_ILOGB):
10152 CASE_FLT_FN (BUILT_IN_LOGB):
10153 return fold_builtin_logb (arg0, type);
10155 case BUILT_IN_ISASCII:
10156 return fold_builtin_isascii (arg0);
10158 case BUILT_IN_TOASCII:
10159 return fold_builtin_toascii (arg0);
10161 case BUILT_IN_ISDIGIT:
10162 return fold_builtin_isdigit (arg0);
10164 CASE_FLT_FN (BUILT_IN_FINITE):
10165 case BUILT_IN_FINITED32:
10166 case BUILT_IN_FINITED64:
10167 case BUILT_IN_FINITED128:
10168 case BUILT_IN_ISFINITE:
10169 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10171 CASE_FLT_FN (BUILT_IN_ISINF):
10172 case BUILT_IN_ISINFD32:
10173 case BUILT_IN_ISINFD64:
10174 case BUILT_IN_ISINFD128:
10175 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10177 case BUILT_IN_ISINF_SIGN:
10178 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10180 CASE_FLT_FN (BUILT_IN_ISNAN):
10181 case BUILT_IN_ISNAND32:
10182 case BUILT_IN_ISNAND64:
10183 case BUILT_IN_ISNAND128:
10184 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10186 case BUILT_IN_PRINTF:
10187 case BUILT_IN_PRINTF_UNLOCKED:
10188 case BUILT_IN_VPRINTF:
10189 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10191 default:
10192 break;
10195 return NULL_TREE;
10199 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10200 IGNORE is true if the result of the function call is ignored. This
10201 function returns NULL_TREE if no simplification was possible. */
10203 static tree
10204 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10206 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10207 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10209 switch (fcode)
10211 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10212 CASE_FLT_FN (BUILT_IN_JN):
10213 if (validate_arg (arg0, INTEGER_TYPE)
10214 && validate_arg (arg1, REAL_TYPE))
10215 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10216 break;
10218 CASE_FLT_FN (BUILT_IN_YN):
10219 if (validate_arg (arg0, INTEGER_TYPE)
10220 && validate_arg (arg1, REAL_TYPE))
10221 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10222 &dconst0, false);
10223 break;
10225 CASE_FLT_FN (BUILT_IN_DREM):
10226 CASE_FLT_FN (BUILT_IN_REMAINDER):
10227 if (validate_arg (arg0, REAL_TYPE)
10228 && validate_arg (arg1, REAL_TYPE))
10229 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10230 break;
10232 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10233 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10234 if (validate_arg (arg0, REAL_TYPE)
10235 && validate_arg (arg1, POINTER_TYPE))
10236 return do_mpfr_lgamma_r (arg0, arg1, type);
10237 break;
10238 #endif
10240 CASE_FLT_FN (BUILT_IN_ATAN2):
10241 if (validate_arg (arg0, REAL_TYPE)
10242 && validate_arg (arg1, REAL_TYPE))
10243 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10244 break;
10246 CASE_FLT_FN (BUILT_IN_FDIM):
10247 if (validate_arg (arg0, REAL_TYPE)
10248 && validate_arg (arg1, REAL_TYPE))
10249 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10250 break;
10252 CASE_FLT_FN (BUILT_IN_HYPOT):
10253 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10255 CASE_FLT_FN (BUILT_IN_LDEXP):
10256 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10257 CASE_FLT_FN (BUILT_IN_SCALBN):
10258 CASE_FLT_FN (BUILT_IN_SCALBLN):
10259 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10261 CASE_FLT_FN (BUILT_IN_FREXP):
10262 return fold_builtin_frexp (arg0, arg1, type);
10264 CASE_FLT_FN (BUILT_IN_MODF):
10265 return fold_builtin_modf (arg0, arg1, type);
10267 case BUILT_IN_BZERO:
10268 return fold_builtin_bzero (arg0, arg1, ignore);
10270 case BUILT_IN_FPUTS:
10271 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10273 case BUILT_IN_FPUTS_UNLOCKED:
10274 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10276 case BUILT_IN_STRSTR:
10277 return fold_builtin_strstr (arg0, arg1, type);
10279 case BUILT_IN_STRCAT:
10280 return fold_builtin_strcat (arg0, arg1);
10282 case BUILT_IN_STRSPN:
10283 return fold_builtin_strspn (arg0, arg1);
10285 case BUILT_IN_STRCSPN:
10286 return fold_builtin_strcspn (arg0, arg1);
10288 case BUILT_IN_STRCHR:
10289 case BUILT_IN_INDEX:
10290 return fold_builtin_strchr (arg0, arg1, type);
10292 case BUILT_IN_STRRCHR:
10293 case BUILT_IN_RINDEX:
10294 return fold_builtin_strrchr (arg0, arg1, type);
10296 case BUILT_IN_STRCPY:
10297 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10299 case BUILT_IN_STRCMP:
10300 return fold_builtin_strcmp (arg0, arg1);
10302 case BUILT_IN_STRPBRK:
10303 return fold_builtin_strpbrk (arg0, arg1, type);
10305 case BUILT_IN_EXPECT:
10306 return fold_builtin_expect (arg0, arg1);
10308 CASE_FLT_FN (BUILT_IN_POW):
10309 return fold_builtin_pow (fndecl, arg0, arg1, type);
10311 CASE_FLT_FN (BUILT_IN_POWI):
10312 return fold_builtin_powi (fndecl, arg0, arg1, type);
10314 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10315 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10317 CASE_FLT_FN (BUILT_IN_FMIN):
10318 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10320 CASE_FLT_FN (BUILT_IN_FMAX):
10321 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10323 case BUILT_IN_ISGREATER:
10324 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10325 case BUILT_IN_ISGREATEREQUAL:
10326 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10327 case BUILT_IN_ISLESS:
10328 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10329 case BUILT_IN_ISLESSEQUAL:
10330 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10331 case BUILT_IN_ISLESSGREATER:
10332 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10333 case BUILT_IN_ISUNORDERED:
10334 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10335 NOP_EXPR);
10337 /* We do the folding for va_start in the expander. */
10338 case BUILT_IN_VA_START:
10339 break;
10341 case BUILT_IN_SPRINTF:
10342 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10344 case BUILT_IN_OBJECT_SIZE:
10345 return fold_builtin_object_size (arg0, arg1);
10347 case BUILT_IN_PRINTF:
10348 case BUILT_IN_PRINTF_UNLOCKED:
10349 case BUILT_IN_VPRINTF:
10350 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10352 case BUILT_IN_PRINTF_CHK:
10353 case BUILT_IN_VPRINTF_CHK:
10354 if (!validate_arg (arg0, INTEGER_TYPE)
10355 || TREE_SIDE_EFFECTS (arg0))
10356 return NULL_TREE;
10357 else
10358 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10359 break;
10361 case BUILT_IN_FPRINTF:
10362 case BUILT_IN_FPRINTF_UNLOCKED:
10363 case BUILT_IN_VFPRINTF:
10364 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10365 ignore, fcode);
10367 default:
10368 break;
10370 return NULL_TREE;
10373 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10374 and ARG2. IGNORE is true if the result of the function call is ignored.
10375 This function returns NULL_TREE if no simplification was possible. */
10377 static tree
10378 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10380 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10381 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10382 switch (fcode)
10385 CASE_FLT_FN (BUILT_IN_SINCOS):
10386 return fold_builtin_sincos (arg0, arg1, arg2);
10388 CASE_FLT_FN (BUILT_IN_FMA):
10389 if (validate_arg (arg0, REAL_TYPE)
10390 && validate_arg (arg1, REAL_TYPE)
10391 && validate_arg (arg2, REAL_TYPE))
10392 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10393 break;
10395 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10396 CASE_FLT_FN (BUILT_IN_REMQUO):
10397 if (validate_arg (arg0, REAL_TYPE)
10398 && validate_arg (arg1, REAL_TYPE)
10399 && validate_arg (arg2, POINTER_TYPE))
10400 return do_mpfr_remquo (arg0, arg1, arg2);
10401 break;
10402 #endif
10404 case BUILT_IN_MEMSET:
10405 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10407 case BUILT_IN_BCOPY:
10408 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10410 case BUILT_IN_MEMCPY:
10411 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10413 case BUILT_IN_MEMPCPY:
10414 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10416 case BUILT_IN_MEMMOVE:
10417 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10419 case BUILT_IN_STRNCAT:
10420 return fold_builtin_strncat (arg0, arg1, arg2);
10422 case BUILT_IN_STRNCPY:
10423 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10425 case BUILT_IN_STRNCMP:
10426 return fold_builtin_strncmp (arg0, arg1, arg2);
10428 case BUILT_IN_MEMCHR:
10429 return fold_builtin_memchr (arg0, arg1, arg2, type);
10431 case BUILT_IN_BCMP:
10432 case BUILT_IN_MEMCMP:
10433 return fold_builtin_memcmp (arg0, arg1, arg2);
10435 case BUILT_IN_SPRINTF:
10436 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10438 case BUILT_IN_STRCPY_CHK:
10439 case BUILT_IN_STPCPY_CHK:
10440 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10441 ignore, fcode);
10443 case BUILT_IN_STRCAT_CHK:
10444 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10446 case BUILT_IN_PRINTF_CHK:
10447 case BUILT_IN_VPRINTF_CHK:
10448 if (!validate_arg (arg0, INTEGER_TYPE)
10449 || TREE_SIDE_EFFECTS (arg0))
10450 return NULL_TREE;
10451 else
10452 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10453 break;
10455 case BUILT_IN_FPRINTF:
10456 case BUILT_IN_FPRINTF_UNLOCKED:
10457 case BUILT_IN_VFPRINTF:
10458 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10460 case BUILT_IN_FPRINTF_CHK:
10461 case BUILT_IN_VFPRINTF_CHK:
10462 if (!validate_arg (arg1, INTEGER_TYPE)
10463 || TREE_SIDE_EFFECTS (arg1))
10464 return NULL_TREE;
10465 else
10466 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10467 ignore, fcode);
10469 default:
10470 break;
10472 return NULL_TREE;
10475 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10476 ARG2, and ARG3. IGNORE is true if the result of the function call is
10477 ignored. This function returns NULL_TREE if no simplification was
10478 possible. */
10480 static tree
10481 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10482 bool ignore)
10484 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10486 switch (fcode)
10488 case BUILT_IN_MEMCPY_CHK:
10489 case BUILT_IN_MEMPCPY_CHK:
10490 case BUILT_IN_MEMMOVE_CHK:
10491 case BUILT_IN_MEMSET_CHK:
10492 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10493 NULL_TREE, ignore,
10494 DECL_FUNCTION_CODE (fndecl));
10496 case BUILT_IN_STRNCPY_CHK:
10497 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10499 case BUILT_IN_STRNCAT_CHK:
10500 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10502 case BUILT_IN_FPRINTF_CHK:
10503 case BUILT_IN_VFPRINTF_CHK:
10504 if (!validate_arg (arg1, INTEGER_TYPE)
10505 || TREE_SIDE_EFFECTS (arg1))
10506 return NULL_TREE;
10507 else
10508 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10509 ignore, fcode);
10510 break;
10512 default:
10513 break;
10515 return NULL_TREE;
10518 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10519 arguments, where NARGS <= 4. IGNORE is true if the result of the
10520 function call is ignored. This function returns NULL_TREE if no
10521 simplification was possible. Note that this only folds builtins with
10522 fixed argument patterns. Foldings that do varargs-to-varargs
10523 transformations, or that match calls with more than 4 arguments,
10524 need to be handled with fold_builtin_varargs instead. */
10526 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10528 static tree
10529 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10531 tree ret = NULL_TREE;
10533 switch (nargs)
10535 case 0:
10536 ret = fold_builtin_0 (fndecl, ignore);
10537 break;
10538 case 1:
10539 ret = fold_builtin_1 (fndecl, args[0], ignore);
10540 break;
10541 case 2:
10542 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10543 break;
10544 case 3:
10545 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10546 break;
10547 case 4:
10548 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10549 ignore);
10550 break;
10551 default:
10552 break;
10554 if (ret)
10556 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10557 TREE_NO_WARNING (ret) = 1;
10558 return ret;
10560 return NULL_TREE;
10563 /* Builtins with folding operations that operate on "..." arguments
10564 need special handling; we need to store the arguments in a convenient
10565 data structure before attempting any folding. Fortunately there are
10566 only a few builtins that fall into this category. FNDECL is the
10567 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10568 result of the function call is ignored. */
10570 static tree
10571 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10573 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10574 tree ret = NULL_TREE;
10576 switch (fcode)
10578 case BUILT_IN_SPRINTF_CHK:
10579 case BUILT_IN_VSPRINTF_CHK:
10580 ret = fold_builtin_sprintf_chk (exp, fcode);
10581 break;
10583 case BUILT_IN_SNPRINTF_CHK:
10584 case BUILT_IN_VSNPRINTF_CHK:
10585 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10586 break;
10588 case BUILT_IN_FPCLASSIFY:
10589 ret = fold_builtin_fpclassify (exp);
10590 break;
10592 default:
10593 break;
10595 if (ret)
10597 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10598 TREE_NO_WARNING (ret) = 1;
10599 return ret;
10601 return NULL_TREE;
10604 /* A wrapper function for builtin folding that prevents warnings for
10605 "statement without effect" and the like, caused by removing the
10606 call node earlier than the warning is generated. */
10608 tree
10609 fold_call_expr (tree exp, bool ignore)
10611 tree ret = NULL_TREE;
10612 tree fndecl = get_callee_fndecl (exp);
10613 if (fndecl
10614 && TREE_CODE (fndecl) == FUNCTION_DECL
10615 && DECL_BUILT_IN (fndecl)
10616 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10617 yet. Defer folding until we see all the arguments
10618 (after inlining). */
10619 && !CALL_EXPR_VA_ARG_PACK (exp))
10621 int nargs = call_expr_nargs (exp);
10623 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10624 instead last argument is __builtin_va_arg_pack (). Defer folding
10625 even in that case, until arguments are finalized. */
10626 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10628 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10629 if (fndecl2
10630 && TREE_CODE (fndecl2) == FUNCTION_DECL
10631 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10632 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10633 return NULL_TREE;
10636 /* FIXME: Don't use a list in this interface. */
10637 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10638 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10639 else
10641 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10643 tree *args = CALL_EXPR_ARGP (exp);
10644 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10646 if (!ret)
10647 ret = fold_builtin_varargs (fndecl, exp, ignore);
10648 if (ret)
10650 /* Propagate location information from original call to
10651 expansion of builtin. Otherwise things like
10652 maybe_emit_chk_warning, that operate on the expansion
10653 of a builtin, will use the wrong location information. */
10654 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10656 tree realret = ret;
10657 if (TREE_CODE (ret) == NOP_EXPR)
10658 realret = TREE_OPERAND (ret, 0);
10659 if (CAN_HAVE_LOCATION_P (realret)
10660 && !EXPR_HAS_LOCATION (realret))
10661 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10662 return realret;
10664 return ret;
10668 return NULL_TREE;
10671 /* Conveniently construct a function call expression. FNDECL names the
10672 function to be called and ARGLIST is a TREE_LIST of arguments. */
10674 tree
10675 build_function_call_expr (tree fndecl, tree arglist)
10677 tree fntype = TREE_TYPE (fndecl);
10678 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10679 int n = list_length (arglist);
10680 tree *argarray = (tree *) alloca (n * sizeof (tree));
10681 int i;
10683 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10684 argarray[i] = TREE_VALUE (arglist);
10685 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10688 /* Conveniently construct a function call expression. FNDECL names the
10689 function to be called, N is the number of arguments, and the "..."
10690 parameters are the argument expressions. */
10692 tree
10693 build_call_expr (tree fndecl, int n, ...)
10695 va_list ap;
10696 tree fntype = TREE_TYPE (fndecl);
10697 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10698 tree *argarray = (tree *) alloca (n * sizeof (tree));
10699 int i;
10701 va_start (ap, n);
10702 for (i = 0; i < n; i++)
10703 argarray[i] = va_arg (ap, tree);
10704 va_end (ap);
10705 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10708 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10709 N arguments are passed in the array ARGARRAY. */
10711 tree
10712 fold_builtin_call_array (tree type,
10713 tree fn,
10714 int n,
10715 tree *argarray)
10717 tree ret = NULL_TREE;
10718 int i;
10719 tree exp;
10721 if (TREE_CODE (fn) == ADDR_EXPR)
10723 tree fndecl = TREE_OPERAND (fn, 0);
10724 if (TREE_CODE (fndecl) == FUNCTION_DECL
10725 && DECL_BUILT_IN (fndecl))
10727 /* If last argument is __builtin_va_arg_pack (), arguments to this
10728 function are not finalized yet. Defer folding until they are. */
10729 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10731 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10732 if (fndecl2
10733 && TREE_CODE (fndecl2) == FUNCTION_DECL
10734 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10735 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10736 return build_call_array (type, fn, n, argarray);
10738 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10740 tree arglist = NULL_TREE;
10741 for (i = n - 1; i >= 0; i--)
10742 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10743 ret = targetm.fold_builtin (fndecl, arglist, false);
10744 if (ret)
10745 return ret;
10747 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10749 /* First try the transformations that don't require consing up
10750 an exp. */
10751 ret = fold_builtin_n (fndecl, argarray, n, false);
10752 if (ret)
10753 return ret;
10756 /* If we got this far, we need to build an exp. */
10757 exp = build_call_array (type, fn, n, argarray);
10758 ret = fold_builtin_varargs (fndecl, exp, false);
10759 return ret ? ret : exp;
10763 return build_call_array (type, fn, n, argarray);
10766 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10767 along with N new arguments specified as the "..." parameters. SKIP
10768 is the number of arguments in EXP to be omitted. This function is used
10769 to do varargs-to-varargs transformations. */
10771 static tree
10772 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10774 int oldnargs = call_expr_nargs (exp);
10775 int nargs = oldnargs - skip + n;
10776 tree fntype = TREE_TYPE (fndecl);
10777 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10778 tree *buffer;
10780 if (n > 0)
10782 int i, j;
10783 va_list ap;
10785 buffer = (tree *) alloca (nargs * sizeof (tree));
10786 va_start (ap, n);
10787 for (i = 0; i < n; i++)
10788 buffer[i] = va_arg (ap, tree);
10789 va_end (ap);
10790 for (j = skip; j < oldnargs; j++, i++)
10791 buffer[i] = CALL_EXPR_ARG (exp, j);
10793 else
10794 buffer = CALL_EXPR_ARGP (exp) + skip;
10796 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10799 /* Validate a single argument ARG against a tree code CODE representing
10800 a type. */
10802 static bool
10803 validate_arg (const_tree arg, enum tree_code code)
10805 if (!arg)
10806 return false;
10807 else if (code == POINTER_TYPE)
10808 return POINTER_TYPE_P (TREE_TYPE (arg));
10809 else if (code == INTEGER_TYPE)
10810 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10811 return code == TREE_CODE (TREE_TYPE (arg));
10814 /* This function validates the types of a function call argument list
10815 against a specified list of tree_codes. If the last specifier is a 0,
10816 that represents an ellipsis; otherwise the last specifier must be a
10817 VOID_TYPE. */
10819 bool
10820 validate_arglist (const_tree callexpr, ...)
10822 enum tree_code code;
10823 bool res = false;
10824 va_list ap;
10825 const_call_expr_arg_iterator iter;
10826 const_tree arg;
10828 va_start (ap, callexpr);
10829 init_const_call_expr_arg_iterator (callexpr, &iter);
10833 code = va_arg (ap, enum tree_code);
10834 switch (code)
10836 case 0:
10837 /* This signifies an ellipsis; any further arguments are all ok. */
10838 res = true;
10839 goto end;
10840 case VOID_TYPE:
10841 /* This signifies an endlink; if no arguments remain, return
10842 true, otherwise return false. */
10843 res = !more_const_call_expr_args_p (&iter);
10844 goto end;
10845 default:
10846 /* If no parameters remain or the parameter's code does not
10847 match the specified code, return false. Otherwise continue
10848 checking any remaining arguments. */
10849 arg = next_const_call_expr_arg (&iter);
10850 if (!validate_arg (arg, code))
10851 goto end;
10852 break;
10855 while (1);
10857 /* We need gotos here since we can only have one VA_CLOSE in a
10858 function. */
10859 end: ;
10860 va_end (ap);
10862 return res;
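/* For illustration, the fpclassify folder above checks its fixed arguments
   with

     validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
                       INTEGER_TYPE, INTEGER_TYPE, REAL_TYPE, VOID_TYPE)

   which accepts exactly five integers followed by one real; ending the
   list with 0 instead of VOID_TYPE would allow any further arguments.  */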
10865 /* Default target-specific builtin expander that does nothing. */
10867 rtx
10868 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10869 rtx target ATTRIBUTE_UNUSED,
10870 rtx subtarget ATTRIBUTE_UNUSED,
10871 enum machine_mode mode ATTRIBUTE_UNUSED,
10872 int ignore ATTRIBUTE_UNUSED)
10874 return NULL_RTX;
10877 /* Returns true if EXP represents data that would potentially reside
10878 in a readonly section. */
10880 static bool
10881 readonly_data_expr (tree exp)
10883 STRIP_NOPS (exp);
10885 if (TREE_CODE (exp) != ADDR_EXPR)
10886 return false;
10888 exp = get_base_address (TREE_OPERAND (exp, 0));
10889 if (!exp)
10890 return false;
10892 /* Make sure we call decl_readonly_section only for trees it
10893 can handle (since it returns true for everything it doesn't
10894 understand). */
10895 if (TREE_CODE (exp) == STRING_CST
10896 || TREE_CODE (exp) == CONSTRUCTOR
10897 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10898 return decl_readonly_section (exp, 0);
10899 else
10900 return false;
10903 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10904 to the call, and TYPE is its return type.
10906 Return NULL_TREE if no simplification was possible, otherwise return the
10907 simplified form of the call as a tree.
10909 The simplified form may be a constant or other expression which
10910 computes the same value, but in a more efficient manner (including
10911 calls to other builtin functions).
10913 The call may contain arguments which need to be evaluated, but
10914 which are not useful to determine the result of the call. In
10915 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10916 COMPOUND_EXPR will be an argument which must be evaluated.
10917 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10918 COMPOUND_EXPR in the chain will contain the tree for the simplified
10919 form of the builtin function call. */
10921 static tree
10922 fold_builtin_strstr (tree s1, tree s2, tree type)
10924 if (!validate_arg (s1, POINTER_TYPE)
10925 || !validate_arg (s2, POINTER_TYPE))
10926 return NULL_TREE;
10927 else
10929 tree fn;
10930 const char *p1, *p2;
10932 p2 = c_getstr (s2);
10933 if (p2 == NULL)
10934 return NULL_TREE;
10936 p1 = c_getstr (s1);
10937 if (p1 != NULL)
10939 const char *r = strstr (p1, p2);
10940 tree tem;
10942 if (r == NULL)
10943 return build_int_cst (TREE_TYPE (s1), 0);
10945 /* Return an offset into the constant string argument. */
10946 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10947 s1, size_int (r - p1));
10948 return fold_convert (type, tem);
10951 /* The argument is const char *, and the result is char *, so we need
10952 a type conversion here to avoid a warning. */
10953 if (p2[0] == '\0')
10954 return fold_convert (type, s1);
10956 if (p2[1] != '\0')
10957 return NULL_TREE;
10959 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10960 if (!fn)
10961 return NULL_TREE;
10963 /* New argument list transforming strstr(s1, s2) to
10964 strchr(s1, s2[0]). */
10965 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
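/* For illustration, with literal arguments visible to the folder:

     strstr ("abcde", "cd") -> (char *) "abcde" + 2
     strstr (s, "")         -> (char *) s
     strstr (s, "c")        -> strchr (s, 'c')   (if the implicit strchr
                                                  declaration is available)  */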
10969 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10970 the call, and TYPE is its return type.
10972 Return NULL_TREE if no simplification was possible, otherwise return the
10973 simplified form of the call as a tree.
10975 The simplified form may be a constant or other expression which
10976 computes the same value, but in a more efficient manner (including
10977 calls to other builtin functions).
10979 The call may contain arguments which need to be evaluated, but
10980 which are not useful to determine the result of the call. In
10981 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10982 COMPOUND_EXPR will be an argument which must be evaluated.
10983 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10984 COMPOUND_EXPR in the chain will contain the tree for the simplified
10985 form of the builtin function call. */
10987 static tree
10988 fold_builtin_strchr (tree s1, tree s2, tree type)
10990 if (!validate_arg (s1, POINTER_TYPE)
10991 || !validate_arg (s2, INTEGER_TYPE))
10992 return NULL_TREE;
10993 else
10995 const char *p1;
10997 if (TREE_CODE (s2) != INTEGER_CST)
10998 return NULL_TREE;
11000 p1 = c_getstr (s1);
11001 if (p1 != NULL)
11003 char c;
11004 const char *r;
11005 tree tem;
11007 if (target_char_cast (s2, &c))
11008 return NULL_TREE;
11010 r = strchr (p1, c);
11012 if (r == NULL)
11013 return build_int_cst (TREE_TYPE (s1), 0);
11015 /* Return an offset into the constant string argument. */
11016 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11017 s1, size_int (r - p1));
11018 return fold_convert (type, tem);
11020 return NULL_TREE;
11024 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11025 the call, and TYPE is its return type.
11027 Return NULL_TREE if no simplification was possible, otherwise return the
11028 simplified form of the call as a tree.
11030 The simplified form may be a constant or other expression which
11031 computes the same value, but in a more efficient manner (including
11032 calls to other builtin functions).
11034 The call may contain arguments which need to be evaluated, but
11035 which are not useful to determine the result of the call. In
11036 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11037 COMPOUND_EXPR will be an argument which must be evaluated.
11038 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11039 COMPOUND_EXPR in the chain will contain the tree for the simplified
11040 form of the builtin function call. */
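/* Illustrative example (editor's addition): roughly,

     strrchr ("abcabc", 'b')  ->  "abcabc" + 4
     strrchr (s, '\0')        ->  strchr (s, '\0')   (if strchr is available)

   Other non-constant cases keep the strrchr library call.  */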
11042 static tree
11043 fold_builtin_strrchr (tree s1, tree s2, tree type)
11045 if (!validate_arg (s1, POINTER_TYPE)
11046 || !validate_arg (s2, INTEGER_TYPE))
11047 return NULL_TREE;
11048 else
11050 tree fn;
11051 const char *p1;
11053 if (TREE_CODE (s2) != INTEGER_CST)
11054 return NULL_TREE;
11056 p1 = c_getstr (s1);
11057 if (p1 != NULL)
11059 char c;
11060 const char *r;
11061 tree tem;
11063 if (target_char_cast (s2, &c))
11064 return NULL_TREE;
11066 r = strrchr (p1, c);
11068 if (r == NULL)
11069 return build_int_cst (TREE_TYPE (s1), 0);
11071 /* Return an offset into the constant string argument. */
11072 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11073 s1, size_int (r - p1));
11074 return fold_convert (type, tem);
11077 if (! integer_zerop (s2))
11078 return NULL_TREE;
11080 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11081 if (!fn)
11082 return NULL_TREE;
11084 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11085 return build_call_expr (fn, 2, s1, s2);
11089 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11090 to the call, and TYPE is its return type.
11092 Return NULL_TREE if no simplification was possible, otherwise return the
11093 simplified form of the call as a tree.
11095 The simplified form may be a constant or other expression which
11096 computes the same value, but in a more efficient manner (including
11097 calls to other builtin functions).
11099 The call may contain arguments which need to be evaluated, but
11100 which are not useful to determine the result of the call. In
11101 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11102 COMPOUND_EXPR will be an argument which must be evaluated.
11103 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11104 COMPOUND_EXPR in the chain will contain the tree for the simplified
11105 form of the builtin function call. */
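/* Illustrative example (editor's addition): roughly,

     strpbrk ("abcde", "dc")  ->  "abcde" + 2
     strpbrk (x, "")          ->  (char *) 0         (x still evaluated)
     strpbrk (s, "a")         ->  strchr (s, 'a')    (if strchr is available)  */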
11107 static tree
11108 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11110 if (!validate_arg (s1, POINTER_TYPE)
11111 || !validate_arg (s2, POINTER_TYPE))
11112 return NULL_TREE;
11113 else
11115 tree fn;
11116 const char *p1, *p2;
11118 p2 = c_getstr (s2);
11119 if (p2 == NULL)
11120 return NULL_TREE;
11122 p1 = c_getstr (s1);
11123 if (p1 != NULL)
11125 const char *r = strpbrk (p1, p2);
11126 tree tem;
11128 if (r == NULL)
11129 return build_int_cst (TREE_TYPE (s1), 0);
11131 /* Return an offset into the constant string argument. */
11132 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11133 s1, size_int (r - p1));
11134 return fold_convert (type, tem);
11137 if (p2[0] == '\0')
11138 /* strpbrk(x, "") == NULL.
11139 Evaluate and ignore s1 in case it had side-effects. */
11140 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11142 if (p2[1] != '\0')
11143 return NULL_TREE; /* Really call strpbrk. */
11145 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11146 if (!fn)
11147 return NULL_TREE;
11149 /* New argument list transforming strpbrk(s1, s2) to
11150 strchr(s1, s2[0]). */
11151 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
11155 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11156 to the call.
11158 Return NULL_TREE if no simplification was possible, otherwise return the
11159 simplified form of the call as a tree.
11161 The simplified form may be a constant or other expression which
11162 computes the same value, but in a more efficient manner (including
11163 calls to other builtin functions).
11165 The call may contain arguments which need to be evaluated, but
11166 which are not useful to determine the result of the call. In
11167 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11168 COMPOUND_EXPR will be an argument which must be evaluated.
11169 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11170 COMPOUND_EXPR in the chain will contain the tree for the simplified
11171 form of the builtin function call. */
11173 static tree
11174 fold_builtin_strcat (tree dst, tree src)
11176 if (!validate_arg (dst, POINTER_TYPE)
11177 || !validate_arg (src, POINTER_TYPE))
11178 return NULL_TREE;
11179 else
11181 const char *p = c_getstr (src);
11183 /* If the string length is zero, return the dst parameter. */
11184 if (p && *p == '\0')
11185 return dst;
11187 return NULL_TREE;
11191 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11192 arguments to the call.
11194 Return NULL_TREE if no simplification was possible, otherwise return the
11195 simplified form of the call as a tree.
11197 The simplified form may be a constant or other expression which
11198 computes the same value, but in a more efficient manner (including
11199 calls to other builtin functions).
11201 The call may contain arguments which need to be evaluated, but
11202 which are not useful to determine the result of the call. In
11203 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11204 COMPOUND_EXPR will be an argument which must be evaluated.
11205 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11206 COMPOUND_EXPR in the chain will contain the tree for the simplified
11207 form of the builtin function call. */
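/* Illustrative example (editor's addition): roughly,

     strncat (d, s, 0)     ->  d                     (s and 0 still evaluated)
     strncat (d, "", n)    ->  d                     (n still evaluated)
     strncat (d, "ab", 5)  ->  strcat (d, "ab")      (5 >= strlen ("ab"))  */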
11209 static tree
11210 fold_builtin_strncat (tree dst, tree src, tree len)
11212 if (!validate_arg (dst, POINTER_TYPE)
11213 || !validate_arg (src, POINTER_TYPE)
11214 || !validate_arg (len, INTEGER_TYPE))
11215 return NULL_TREE;
11216 else
11218 const char *p = c_getstr (src);
11220 /* If the requested length is zero, or the src parameter string
11221 length is zero, return the dst parameter. */
11222 if (integer_zerop (len) || (p && *p == '\0'))
11223 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11225 /* If the requested len is greater than or equal to the string
11226 length, call strcat. */
11227 if (TREE_CODE (len) == INTEGER_CST && p
11228 && compare_tree_int (len, strlen (p)) >= 0)
11230 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11232 /* If the replacement _DECL isn't initialized, don't do the
11233 transformation. */
11234 if (!fn)
11235 return NULL_TREE;
11237 return build_call_expr (fn, 2, dst, src);
11239 return NULL_TREE;
11243 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11244 to the call.
11246 Return NULL_TREE if no simplification was possible, otherwise return the
11247 simplified form of the call as a tree.
11249 The simplified form may be a constant or other expression which
11250 computes the same value, but in a more efficient manner (including
11251 calls to other builtin functions).
11253 The call may contain arguments which need to be evaluated, but
11254 which are not useful to determine the result of the call. In
11255 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11256 COMPOUND_EXPR will be an argument which must be evaluated.
11257 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11258 COMPOUND_EXPR in the chain will contain the tree for the simplified
11259 form of the builtin function call. */
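/* Illustrative example (editor's addition): roughly,

     strspn ("abcba", "ab")  ->  2                   (computed at compile time)
     strspn (s, "")          ->  0                   (s still evaluated)
     strspn ("", s)          ->  0                   (s still evaluated)  */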
11261 static tree
11262 fold_builtin_strspn (tree s1, tree s2)
11264 if (!validate_arg (s1, POINTER_TYPE)
11265 || !validate_arg (s2, POINTER_TYPE))
11266 return NULL_TREE;
11267 else
11269 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11271 /* If both arguments are constants, evaluate at compile-time. */
11272 if (p1 && p2)
11274 const size_t r = strspn (p1, p2);
11275 return size_int (r);
11278 /* If either argument is "", the result is 0. */
11279 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11280 /* Evaluate and ignore both arguments in case either one has
11281 side-effects. */
11282 return omit_two_operands (integer_type_node, integer_zero_node,
11283 s1, s2);
11284 return NULL_TREE;
11288 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11289 to the call.
11291 Return NULL_TREE if no simplification was possible, otherwise return the
11292 simplified form of the call as a tree.
11294 The simplified form may be a constant or other expression which
11295 computes the same value, but in a more efficient manner (including
11296 calls to other builtin functions).
11298 The call may contain arguments which need to be evaluated, but
11299 which are not useful to determine the result of the call. In
11300 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11301 COMPOUND_EXPR will be an argument which must be evaluated.
11302 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11303 COMPOUND_EXPR in the chain will contain the tree for the simplified
11304 form of the builtin function call. */
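/* Illustrative example (editor's addition): roughly,

     strcspn ("abcde", "dc")  ->  2                  (computed at compile time)
     strcspn ("", s)          ->  0                  (s still evaluated)
     strcspn (s, "")          ->  strlen (s)         (if strlen is available)  */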
11306 static tree
11307 fold_builtin_strcspn (tree s1, tree s2)
11309 if (!validate_arg (s1, POINTER_TYPE)
11310 || !validate_arg (s2, POINTER_TYPE))
11311 return NULL_TREE;
11312 else
11314 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11316 /* If both arguments are constants, evaluate at compile-time. */
11317 if (p1 && p2)
11319 const size_t r = strcspn (p1, p2);
11320 return size_int (r);
11323 /* If the first argument is "", the result is 0. */
11324 if (p1 && *p1 == '\0')
11326 /* Evaluate and ignore argument s2 in case it has
11327 side-effects. */
11328 return omit_one_operand (integer_type_node,
11329 integer_zero_node, s2);
11332 /* If the second argument is "", return __builtin_strlen(s1). */
11333 if (p2 && *p2 == '\0')
11335 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11337 /* If the replacement _DECL isn't initialized, don't do the
11338 transformation. */
11339 if (!fn)
11340 return NULL_TREE;
11342 return build_call_expr (fn, 1, s1);
11344 return NULL_TREE;
11348 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11349 to the call. IGNORE is true if the value returned
11350 by the builtin will be ignored. UNLOCKED is true if this is
11351 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11352 the known length of the string. Return NULL_TREE if no simplification
11353 was possible. */
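/* Illustrative example (editor's addition): when the return value is unused,
   roughly,

     fputs ("", f)       ->  call deleted            (f still evaluated)
     fputs ("x", f)      ->  fputc ('x', f)
     fputs ("hello", f)  ->  fwrite ("hello", 1, 5, f)   (not with -Os)  */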
11355 tree
11356 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11358 /* If we're using an unlocked function, assume the other unlocked
11359 functions exist explicitly. */
11360 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11361 : implicit_built_in_decls[BUILT_IN_FPUTC];
11362 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11363 : implicit_built_in_decls[BUILT_IN_FWRITE];
11365 /* If the return value is used, don't do the transformation. */
11366 if (!ignore)
11367 return NULL_TREE;
11369 /* Verify the arguments in the original call. */
11370 if (!validate_arg (arg0, POINTER_TYPE)
11371 || !validate_arg (arg1, POINTER_TYPE))
11372 return NULL_TREE;
11374 if (! len)
11375 len = c_strlen (arg0, 0);
11377 /* Get the length of the string passed to fputs. If the length
11378 can't be determined, punt. */
11379 if (!len
11380 || TREE_CODE (len) != INTEGER_CST)
11381 return NULL_TREE;
11383 switch (compare_tree_int (len, 1))
11385 case -1: /* length is 0, delete the call entirely. */
11386 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11388 case 0: /* length is 1, call fputc. */
11390 const char *p = c_getstr (arg0);
11392 if (p != NULL)
11394 if (fn_fputc)
11395 return build_call_expr (fn_fputc, 2,
11396 build_int_cst (NULL_TREE, p[0]), arg1);
11397 else
11398 return NULL_TREE;
11401 /* FALLTHROUGH */
11402 case 1: /* length is greater than 1, call fwrite. */
11404 /* If optimizing for size, keep fputs. */
11405 if (optimize_size)
11406 return NULL_TREE;
11407 /* New argument list transforming fputs(string, stream) to
11408 fwrite(string, 1, len, stream). */
11409 if (fn_fwrite)
11410 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11411 else
11412 return NULL_TREE;
11414 default:
11415 gcc_unreachable ();
11417 return NULL_TREE;
11420 /* Fold the next_arg or va_start call EXP. Returns true if an error was
11421 produced, false otherwise. This is done so that we don't output the
11422 error or warning more than once. */
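/* Illustrative example (editor's addition): for

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, b); ... va_end (ap); }

   the second argument of va_start must be the last named parameter (here b);
   anything else draws the warning below, and once checked the argument is
   replaced by 0 so later passes do not diagnose it again.  */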
11423 bool
11424 fold_builtin_next_arg (tree exp, bool va_start_p)
11426 tree fntype = TREE_TYPE (current_function_decl);
11427 int nargs = call_expr_nargs (exp);
11428 tree arg;
11430 if (TYPE_ARG_TYPES (fntype) == 0
11431 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11432 == void_type_node))
11434 error ("%<va_start%> used in function with fixed args");
11435 return true;
11438 if (va_start_p)
11440 if (va_start_p && (nargs != 2))
11442 error ("wrong number of arguments to function %<va_start%>");
11443 return true;
11445 arg = CALL_EXPR_ARG (exp, 1);
11447 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11448 when we checked the arguments and if needed issued a warning. */
11449 else
11451 if (nargs == 0)
11453 /* Evidently an out of date version of <stdarg.h>; can't validate
11454 va_start's second argument, but can still work as intended. */
11455 warning (0, "%<__builtin_next_arg%> called without an argument");
11456 return true;
11458 else if (nargs > 1)
11460 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11461 return true;
11463 arg = CALL_EXPR_ARG (exp, 0);
11466 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11467 or __builtin_next_arg (0) the first time we see it, after checking
11468 the arguments and if needed issuing a warning. */
11469 if (!integer_zerop (arg))
11471 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11473 /* Strip off all nops for the sake of the comparison. This
11474 is not quite the same as STRIP_NOPS. It does more.
11475 We must also strip off INDIRECT_REF for C++ reference
11476 parameters. */
11477 while (CONVERT_EXPR_P (arg)
11478 || TREE_CODE (arg) == INDIRECT_REF)
11479 arg = TREE_OPERAND (arg, 0);
11480 if (arg != last_parm)
11482 /* FIXME: Sometimes with the tree optimizers we can end up with
11483 something other than the last argument even though the user used
11484 the last argument. We just warn and set the arg to be the last
11485 argument so that we will not get wrong code because of
11486 it. */
11487 warning (0, "second parameter of %<va_start%> not last named argument");
11489 /* We want to verify the second parameter just once before the tree
11490 optimizers are run and then avoid keeping it in the tree,
11491 as otherwise we could warn even for correct code like:
11492 void foo (int i, ...)
11493 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11494 if (va_start_p)
11495 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11496 else
11497 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11499 return false;
11503 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11504 ORIG may be null if this is a 2-argument call. We don't attempt to
11505 simplify calls with more than 3 arguments.
11507 Return NULL_TREE if no simplification was possible, otherwise return the
11508 simplified form of the call as a tree. If IGNORED is true, it means that
11509 the caller does not use the returned value of the function. */
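/* Illustrative example (editor's addition): roughly,

     sprintf (d, "hello")  ->  strcpy (d, "hello")   (result is 5 if used)
     sprintf (d, "%s", s)  ->  strcpy (d, s)          (only if the result is
                                                       unused or strlen (s) is
                                                       a known constant)

   Formats containing other % directives are left to the library call.  */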
11511 static tree
11512 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11514 tree call, retval;
11515 const char *fmt_str = NULL;
11517 /* Verify the required arguments in the original call. We deal with two
11518 types of sprintf() calls: 'sprintf (str, fmt)' and
11519 'sprintf (dest, "%s", orig)'. */
11520 if (!validate_arg (dest, POINTER_TYPE)
11521 || !validate_arg (fmt, POINTER_TYPE))
11522 return NULL_TREE;
11523 if (orig && !validate_arg (orig, POINTER_TYPE))
11524 return NULL_TREE;
11526 /* Check whether the format is a literal string constant. */
11527 fmt_str = c_getstr (fmt);
11528 if (fmt_str == NULL)
11529 return NULL_TREE;
11531 call = NULL_TREE;
11532 retval = NULL_TREE;
11534 if (!init_target_chars ())
11535 return NULL_TREE;
11537 /* If the format doesn't contain % args or %%, use strcpy. */
11538 if (strchr (fmt_str, target_percent) == NULL)
11540 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11542 if (!fn)
11543 return NULL_TREE;
11545 /* Don't optimize sprintf (buf, "abc", ptr++). */
11546 if (orig)
11547 return NULL_TREE;
11549 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11550 'format' is known to contain no % formats. */
11551 call = build_call_expr (fn, 2, dest, fmt);
11552 if (!ignored)
11553 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11556 /* If the format is "%s", use strcpy if the result isn't used. */
11557 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11559 tree fn;
11560 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11562 if (!fn)
11563 return NULL_TREE;
11565 /* Don't crash on sprintf (str1, "%s"). */
11566 if (!orig)
11567 return NULL_TREE;
11569 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11570 if (!ignored)
11572 retval = c_strlen (orig, 1);
11573 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11574 return NULL_TREE;
11576 call = build_call_expr (fn, 2, dest, orig);
11579 if (call && retval)
11581 retval = fold_convert
11582 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11583 retval);
11584 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11586 else
11587 return call;
11590 /* Expand a call EXP to __builtin_object_size. */
11592 static rtx
11593 expand_builtin_object_size (tree exp)
11595 tree ost;
11596 int object_size_type;
11597 tree fndecl = get_callee_fndecl (exp);
11599 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11601 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11602 exp, fndecl);
11603 expand_builtin_trap ();
11604 return const0_rtx;
11607 ost = CALL_EXPR_ARG (exp, 1);
11608 STRIP_NOPS (ost);
11610 if (TREE_CODE (ost) != INTEGER_CST
11611 || tree_int_cst_sgn (ost) < 0
11612 || compare_tree_int (ost, 3) > 0)
11614 error ("%Klast argument of %D is not integer constant between 0 and 3",
11615 exp, fndecl);
11616 expand_builtin_trap ();
11617 return const0_rtx;
11620 object_size_type = tree_low_cst (ost, 0);
11622 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11625 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11626 FCODE is the BUILT_IN_* to use.
11627 Return NULL_RTX if we failed; the caller should emit a normal call,
11628 otherwise try to get the result in TARGET, if convenient (and in
11629 mode MODE if that's convenient). */
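/* Illustrative example (editor's addition): with

     char buf[10];
     __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));

   the object size folds to 10, the constant length 8 fits, and the call is
   expanded as a plain memcpy (buf, src, 8); if the length were known to
   exceed the size, a "will always overflow destination buffer" warning is
   emitted and the checking call is kept.  */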
11631 static rtx
11632 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11633 enum built_in_function fcode)
11635 tree dest, src, len, size;
11637 if (!validate_arglist (exp,
11638 POINTER_TYPE,
11639 fcode == BUILT_IN_MEMSET_CHK
11640 ? INTEGER_TYPE : POINTER_TYPE,
11641 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11642 return NULL_RTX;
11644 dest = CALL_EXPR_ARG (exp, 0);
11645 src = CALL_EXPR_ARG (exp, 1);
11646 len = CALL_EXPR_ARG (exp, 2);
11647 size = CALL_EXPR_ARG (exp, 3);
11649 if (! host_integerp (size, 1))
11650 return NULL_RTX;
11652 if (host_integerp (len, 1) || integer_all_onesp (size))
11654 tree fn;
11656 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11658 warning (0, "%Kcall to %D will always overflow destination buffer",
11659 exp, get_callee_fndecl (exp));
11660 return NULL_RTX;
11663 fn = NULL_TREE;
11664 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11665 mem{cpy,pcpy,move,set} is available. */
11666 switch (fcode)
11668 case BUILT_IN_MEMCPY_CHK:
11669 fn = built_in_decls[BUILT_IN_MEMCPY];
11670 break;
11671 case BUILT_IN_MEMPCPY_CHK:
11672 fn = built_in_decls[BUILT_IN_MEMPCPY];
11673 break;
11674 case BUILT_IN_MEMMOVE_CHK:
11675 fn = built_in_decls[BUILT_IN_MEMMOVE];
11676 break;
11677 case BUILT_IN_MEMSET_CHK:
11678 fn = built_in_decls[BUILT_IN_MEMSET];
11679 break;
11680 default:
11681 break;
11684 if (! fn)
11685 return NULL_RTX;
11687 fn = build_call_expr (fn, 3, dest, src, len);
11688 STRIP_TYPE_NOPS (fn);
11689 while (TREE_CODE (fn) == COMPOUND_EXPR)
11691 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11692 EXPAND_NORMAL);
11693 fn = TREE_OPERAND (fn, 1);
11695 if (TREE_CODE (fn) == CALL_EXPR)
11696 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11697 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11699 else if (fcode == BUILT_IN_MEMSET_CHK)
11700 return NULL_RTX;
11701 else
11703 unsigned int dest_align
11704 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11706 /* If DEST is not a pointer type, call the normal function. */
11707 if (dest_align == 0)
11708 return NULL_RTX;
11710 /* If SRC and DEST are the same (and not volatile), do nothing. */
11711 if (operand_equal_p (src, dest, 0))
11713 tree expr;
11715 if (fcode != BUILT_IN_MEMPCPY_CHK)
11717 /* Evaluate and ignore LEN in case it has side-effects. */
11718 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11719 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11722 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11723 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11726 /* __memmove_chk special case. */
11727 if (fcode == BUILT_IN_MEMMOVE_CHK)
11729 unsigned int src_align
11730 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11732 if (src_align == 0)
11733 return NULL_RTX;
11735 /* If src is categorized for a readonly section we can use
11736 normal __memcpy_chk. */
11737 if (readonly_data_expr (src))
11739 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11740 if (!fn)
11741 return NULL_RTX;
11742 fn = build_call_expr (fn, 4, dest, src, len, size);
11743 STRIP_TYPE_NOPS (fn);
11744 while (TREE_CODE (fn) == COMPOUND_EXPR)
11746 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11747 EXPAND_NORMAL);
11748 fn = TREE_OPERAND (fn, 1);
11750 if (TREE_CODE (fn) == CALL_EXPR)
11751 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11752 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11755 return NULL_RTX;
11759 /* Emit warning if a buffer overflow is detected at compile time. */
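/* Illustrative example (editor's addition): for

     char buf[4];
     __builtin___strcpy_chk (buf, "overflow", __builtin_object_size (buf, 0));

   the known source length (8) is not smaller than the object size (4), so a
   "will always overflow destination buffer" warning is issued here when the
   call could not be folded away earlier.  */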
11761 static void
11762 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11764 int is_strlen = 0;
11765 tree len, size;
11767 switch (fcode)
11769 case BUILT_IN_STRCPY_CHK:
11770 case BUILT_IN_STPCPY_CHK:
11771 /* For __strcat_chk the warning will be emitted only if overflowing
11772 by at least strlen (dest) + 1 bytes. */
11773 case BUILT_IN_STRCAT_CHK:
11774 len = CALL_EXPR_ARG (exp, 1);
11775 size = CALL_EXPR_ARG (exp, 2);
11776 is_strlen = 1;
11777 break;
11778 case BUILT_IN_STRNCAT_CHK:
11779 case BUILT_IN_STRNCPY_CHK:
11780 len = CALL_EXPR_ARG (exp, 2);
11781 size = CALL_EXPR_ARG (exp, 3);
11782 break;
11783 case BUILT_IN_SNPRINTF_CHK:
11784 case BUILT_IN_VSNPRINTF_CHK:
11785 len = CALL_EXPR_ARG (exp, 1);
11786 size = CALL_EXPR_ARG (exp, 3);
11787 break;
11788 default:
11789 gcc_unreachable ();
11792 if (!len || !size)
11793 return;
11795 if (! host_integerp (size, 1) || integer_all_onesp (size))
11796 return;
11798 if (is_strlen)
11800 len = c_strlen (len, 1);
11801 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11802 return;
11804 else if (fcode == BUILT_IN_STRNCAT_CHK)
11806 tree src = CALL_EXPR_ARG (exp, 1);
11807 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11808 return;
11809 src = c_strlen (src, 1);
11810 if (! src || ! host_integerp (src, 1))
11812 warning (0, "%Kcall to %D might overflow destination buffer",
11813 exp, get_callee_fndecl (exp));
11814 return;
11816 else if (tree_int_cst_lt (src, size))
11817 return;
11819 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11820 return;
11822 warning (0, "%Kcall to %D will always overflow destination buffer",
11823 exp, get_callee_fndecl (exp));
11826 /* Emit warning if a buffer overflow is detected at compile time
11827 in __sprintf_chk/__vsprintf_chk calls. */
11829 static void
11830 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11832 tree dest, size, len, fmt, flag;
11833 const char *fmt_str;
11834 int nargs = call_expr_nargs (exp);
11836 /* Verify the required arguments in the original call. */
11838 if (nargs < 4)
11839 return;
11840 dest = CALL_EXPR_ARG (exp, 0);
11841 flag = CALL_EXPR_ARG (exp, 1);
11842 size = CALL_EXPR_ARG (exp, 2);
11843 fmt = CALL_EXPR_ARG (exp, 3);
11845 if (! host_integerp (size, 1) || integer_all_onesp (size))
11846 return;
11848 /* Check whether the format is a literal string constant. */
11849 fmt_str = c_getstr (fmt);
11850 if (fmt_str == NULL)
11851 return;
11853 if (!init_target_chars ())
11854 return;
11856 /* If the format doesn't contain % args or %%, we know its size. */
11857 if (strchr (fmt_str, target_percent) == 0)
11858 len = build_int_cstu (size_type_node, strlen (fmt_str));
11859 /* If the format is "%s" and first ... argument is a string literal,
11860 we know it too. */
11861 else if (fcode == BUILT_IN_SPRINTF_CHK
11862 && strcmp (fmt_str, target_percent_s) == 0)
11864 tree arg;
11866 if (nargs < 5)
11867 return;
11868 arg = CALL_EXPR_ARG (exp, 4);
11869 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11870 return;
11872 len = c_strlen (arg, 1);
11873 if (!len || ! host_integerp (len, 1))
11874 return;
11876 else
11877 return;
11879 if (! tree_int_cst_lt (len, size))
11881 warning (0, "%Kcall to %D will always overflow destination buffer",
11882 exp, get_callee_fndecl (exp));
11886 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11887 if possible. */
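/* Illustrative example (editor's addition): for

     char buf[64];

   __builtin_object_size (&buf, 0) folds to 64 here; a pointer whose object
   cannot be determined yet is left unfolded so later passes may try again,
   and a pointer argument with side-effects folds directly to (size_t) -1
   for types 0/1 and 0 for types 2/3.  */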
11889 tree
11890 fold_builtin_object_size (tree ptr, tree ost)
11892 tree ret = NULL_TREE;
11893 int object_size_type;
11895 if (!validate_arg (ptr, POINTER_TYPE)
11896 || !validate_arg (ost, INTEGER_TYPE))
11897 return NULL_TREE;
11899 STRIP_NOPS (ost);
11901 if (TREE_CODE (ost) != INTEGER_CST
11902 || tree_int_cst_sgn (ost) < 0
11903 || compare_tree_int (ost, 3) > 0)
11904 return NULL_TREE;
11906 object_size_type = tree_low_cst (ost, 0);
11908 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11909 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11910 and (size_t) 0 for types 2 and 3. */
11911 if (TREE_SIDE_EFFECTS (ptr))
11912 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11914 if (TREE_CODE (ptr) == ADDR_EXPR)
11915 ret = build_int_cstu (size_type_node,
11916 compute_builtin_object_size (ptr, object_size_type));
11918 else if (TREE_CODE (ptr) == SSA_NAME)
11920 unsigned HOST_WIDE_INT bytes;
11922 /* If object size is not known yet, delay folding until
11923 later. Maybe subsequent passes will help determining
11924 it. */
11925 bytes = compute_builtin_object_size (ptr, object_size_type);
11926 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11927 ? -1 : 0))
11928 ret = build_int_cstu (size_type_node, bytes);
11931 if (ret)
11933 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11934 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11935 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11936 ret = NULL_TREE;
11939 return ret;
11942 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11943 DEST, SRC, LEN, and SIZE are the arguments to the call.
11944 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11945 code of the builtin. If MAXLEN is not NULL, it is maximum length
11946 passed as third argument. */
11948 tree
11949 fold_builtin_memory_chk (tree fndecl,
11950 tree dest, tree src, tree len, tree size,
11951 tree maxlen, bool ignore,
11952 enum built_in_function fcode)
11954 tree fn;
11956 if (!validate_arg (dest, POINTER_TYPE)
11957 || !validate_arg (src,
11958 (fcode == BUILT_IN_MEMSET_CHK
11959 ? INTEGER_TYPE : POINTER_TYPE))
11960 || !validate_arg (len, INTEGER_TYPE)
11961 || !validate_arg (size, INTEGER_TYPE))
11962 return NULL_TREE;
11964 /* If SRC and DEST are the same (and not volatile), return DEST
11965 (resp. DEST+LEN for __mempcpy_chk). */
11966 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11968 if (fcode != BUILT_IN_MEMPCPY_CHK)
11969 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11970 else
11972 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11973 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11977 if (! host_integerp (size, 1))
11978 return NULL_TREE;
11980 if (! integer_all_onesp (size))
11982 if (! host_integerp (len, 1))
11984 /* If LEN is not constant, try MAXLEN too.
11985 For MAXLEN only allow optimizing into non-_ocs function
11986 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11987 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11989 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11991 /* (void) __mempcpy_chk () can be optimized into
11992 (void) __memcpy_chk (). */
11993 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11994 if (!fn)
11995 return NULL_TREE;
11997 return build_call_expr (fn, 4, dest, src, len, size);
11999 return NULL_TREE;
12002 else
12003 maxlen = len;
12005 if (tree_int_cst_lt (size, maxlen))
12006 return NULL_TREE;
12009 fn = NULL_TREE;
12010 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12011 mem{cpy,pcpy,move,set} is available. */
12012 switch (fcode)
12014 case BUILT_IN_MEMCPY_CHK:
12015 fn = built_in_decls[BUILT_IN_MEMCPY];
12016 break;
12017 case BUILT_IN_MEMPCPY_CHK:
12018 fn = built_in_decls[BUILT_IN_MEMPCPY];
12019 break;
12020 case BUILT_IN_MEMMOVE_CHK:
12021 fn = built_in_decls[BUILT_IN_MEMMOVE];
12022 break;
12023 case BUILT_IN_MEMSET_CHK:
12024 fn = built_in_decls[BUILT_IN_MEMSET];
12025 break;
12026 default:
12027 break;
12030 if (!fn)
12031 return NULL_TREE;
12033 return build_call_expr (fn, 3, dest, src, len);
12036 /* Fold a call to the __st[rp]cpy_chk builtin.
12037 DEST, SRC, and SIZE are the arguments to the call.
12038 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12039 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12040 strings passed as second argument. */
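/* Illustrative example (editor's addition): with

     char buf[16];
     __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));

   strlen ("hello") == 5 is known to be smaller than the object size 16, so
   the call becomes a plain strcpy (buf, "hello"); with a source of known but
   non-constant length it becomes __memcpy_chk (buf, src, len + 1, 16)
   instead.  */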
12042 tree
12043 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12044 tree maxlen, bool ignore,
12045 enum built_in_function fcode)
12047 tree len, fn;
12049 if (!validate_arg (dest, POINTER_TYPE)
12050 || !validate_arg (src, POINTER_TYPE)
12051 || !validate_arg (size, INTEGER_TYPE))
12052 return NULL_TREE;
12054 /* If SRC and DEST are the same (and not volatile), return DEST. */
12055 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12056 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12058 if (! host_integerp (size, 1))
12059 return NULL_TREE;
12061 if (! integer_all_onesp (size))
12063 len = c_strlen (src, 1);
12064 if (! len || ! host_integerp (len, 1))
12066 /* If LEN is not constant, try MAXLEN too.
12067 For MAXLEN only allow optimizing into non-_ocs function
12068 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12069 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12071 if (fcode == BUILT_IN_STPCPY_CHK)
12073 if (! ignore)
12074 return NULL_TREE;
12076 /* If return value of __stpcpy_chk is ignored,
12077 optimize into __strcpy_chk. */
12078 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12079 if (!fn)
12080 return NULL_TREE;
12082 return build_call_expr (fn, 3, dest, src, size);
12085 if (! len || TREE_SIDE_EFFECTS (len))
12086 return NULL_TREE;
12088 /* If c_strlen returned something, but not a constant,
12089 transform __strcpy_chk into __memcpy_chk. */
12090 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12091 if (!fn)
12092 return NULL_TREE;
12094 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12095 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12096 build_call_expr (fn, 4,
12097 dest, src, len, size));
12100 else
12101 maxlen = len;
12103 if (! tree_int_cst_lt (maxlen, size))
12104 return NULL_TREE;
12107 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12108 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12109 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12110 if (!fn)
12111 return NULL_TREE;
12113 return build_call_expr (fn, 2, dest, src);
12116 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12117 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12118 length passed as third argument. */
12120 tree
12121 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12122 tree maxlen)
12124 tree fn;
12126 if (!validate_arg (dest, POINTER_TYPE)
12127 || !validate_arg (src, POINTER_TYPE)
12128 || !validate_arg (len, INTEGER_TYPE)
12129 || !validate_arg (size, INTEGER_TYPE))
12130 return NULL_TREE;
12132 if (! host_integerp (size, 1))
12133 return NULL_TREE;
12135 if (! integer_all_onesp (size))
12137 if (! host_integerp (len, 1))
12139 /* If LEN is not constant, try MAXLEN too.
12140 For MAXLEN only allow optimizing into non-_ocs function
12141 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12142 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12143 return NULL_TREE;
12145 else
12146 maxlen = len;
12148 if (tree_int_cst_lt (size, maxlen))
12149 return NULL_TREE;
12152 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12153 fn = built_in_decls[BUILT_IN_STRNCPY];
12154 if (!fn)
12155 return NULL_TREE;
12157 return build_call_expr (fn, 3, dest, src, len);
12160 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12161 are the arguments to the call. */
12163 static tree
12164 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12166 tree fn;
12167 const char *p;
12169 if (!validate_arg (dest, POINTER_TYPE)
12170 || !validate_arg (src, POINTER_TYPE)
12171 || !validate_arg (size, INTEGER_TYPE))
12172 return NULL_TREE;
12174 p = c_getstr (src);
12175 /* If the SRC parameter is "", return DEST. */
12176 if (p && *p == '\0')
12177 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12179 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12180 return NULL_TREE;
12182 /* If __builtin_strcat_chk is used, assume strcat is available. */
12183 fn = built_in_decls[BUILT_IN_STRCAT];
12184 if (!fn)
12185 return NULL_TREE;
12187 return build_call_expr (fn, 2, dest, src);
12190 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12191 LEN, and SIZE. */
12193 static tree
12194 fold_builtin_strncat_chk (tree fndecl,
12195 tree dest, tree src, tree len, tree size)
12197 tree fn;
12198 const char *p;
12200 if (!validate_arg (dest, POINTER_TYPE)
12201 || !validate_arg (src, POINTER_TYPE)
12202 || !validate_arg (len, INTEGER_TYPE)
12203 || !validate_arg (size, INTEGER_TYPE))
12204 return NULL_TREE;
12206 p = c_getstr (src);
12207 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12208 if (p && *p == '\0')
12209 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12210 else if (integer_zerop (len))
12211 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12213 if (! host_integerp (size, 1))
12214 return NULL_TREE;
12216 if (! integer_all_onesp (size))
12218 tree src_len = c_strlen (src, 1);
12219 if (src_len
12220 && host_integerp (src_len, 1)
12221 && host_integerp (len, 1)
12222 && ! tree_int_cst_lt (len, src_len))
12224 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12225 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12226 if (!fn)
12227 return NULL_TREE;
12229 return build_call_expr (fn, 3, dest, src, size);
12231 return NULL_TREE;
12234 /* If __builtin_strncat_chk is used, assume strncat is available. */
12235 fn = built_in_decls[BUILT_IN_STRNCAT];
12236 if (!fn)
12237 return NULL_TREE;
12239 return build_call_expr (fn, 3, dest, src, len);
12242 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12243 a normal call should be emitted rather than expanding the function
12244 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12246 static tree
12247 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12249 tree dest, size, len, fn, fmt, flag;
12250 const char *fmt_str;
12251 int nargs = call_expr_nargs (exp);
12253 /* Verify the required arguments in the original call. */
12254 if (nargs < 4)
12255 return NULL_TREE;
12256 dest = CALL_EXPR_ARG (exp, 0);
12257 if (!validate_arg (dest, POINTER_TYPE))
12258 return NULL_TREE;
12259 flag = CALL_EXPR_ARG (exp, 1);
12260 if (!validate_arg (flag, INTEGER_TYPE))
12261 return NULL_TREE;
12262 size = CALL_EXPR_ARG (exp, 2);
12263 if (!validate_arg (size, INTEGER_TYPE))
12264 return NULL_TREE;
12265 fmt = CALL_EXPR_ARG (exp, 3);
12266 if (!validate_arg (fmt, POINTER_TYPE))
12267 return NULL_TREE;
12269 if (! host_integerp (size, 1))
12270 return NULL_TREE;
12272 len = NULL_TREE;
12274 if (!init_target_chars ())
12275 return NULL_TREE;
12277 /* Check whether the format is a literal string constant. */
12278 fmt_str = c_getstr (fmt);
12279 if (fmt_str != NULL)
12281 /* If the format doesn't contain % args or %%, we know the size. */
12282 if (strchr (fmt_str, target_percent) == 0)
12284 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12285 len = build_int_cstu (size_type_node, strlen (fmt_str));
12287 /* If the format is "%s" and first ... argument is a string literal,
12288 we know the size too. */
12289 else if (fcode == BUILT_IN_SPRINTF_CHK
12290 && strcmp (fmt_str, target_percent_s) == 0)
12292 tree arg;
12294 if (nargs == 5)
12296 arg = CALL_EXPR_ARG (exp, 4);
12297 if (validate_arg (arg, POINTER_TYPE))
12299 len = c_strlen (arg, 1);
12300 if (! len || ! host_integerp (len, 1))
12301 len = NULL_TREE;
12307 if (! integer_all_onesp (size))
12309 if (! len || ! tree_int_cst_lt (len, size))
12310 return NULL_TREE;
12313 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12314 or if format doesn't contain % chars or is "%s". */
12315 if (! integer_zerop (flag))
12317 if (fmt_str == NULL)
12318 return NULL_TREE;
12319 if (strchr (fmt_str, target_percent) != NULL
12320 && strcmp (fmt_str, target_percent_s))
12321 return NULL_TREE;
12324 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12325 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12326 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12327 if (!fn)
12328 return NULL_TREE;
12330 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
12333 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12334 a normal call should be emitted rather than expanding the function
12335 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12336 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12337 passed as second argument. */
12339 tree
12340 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12341 enum built_in_function fcode)
12343 tree dest, size, len, fn, fmt, flag;
12344 const char *fmt_str;
12346 /* Verify the required arguments in the original call. */
12347 if (call_expr_nargs (exp) < 5)
12348 return NULL_TREE;
12349 dest = CALL_EXPR_ARG (exp, 0);
12350 if (!validate_arg (dest, POINTER_TYPE))
12351 return NULL_TREE;
12352 len = CALL_EXPR_ARG (exp, 1);
12353 if (!validate_arg (len, INTEGER_TYPE))
12354 return NULL_TREE;
12355 flag = CALL_EXPR_ARG (exp, 2);
12356 if (!validate_arg (flag, INTEGER_TYPE))
12357 return NULL_TREE;
12358 size = CALL_EXPR_ARG (exp, 3);
12359 if (!validate_arg (size, INTEGER_TYPE))
12360 return NULL_TREE;
12361 fmt = CALL_EXPR_ARG (exp, 4);
12362 if (!validate_arg (fmt, POINTER_TYPE))
12363 return NULL_TREE;
12365 if (! host_integerp (size, 1))
12366 return NULL_TREE;
12368 if (! integer_all_onesp (size))
12370 if (! host_integerp (len, 1))
12372 /* If LEN is not constant, try MAXLEN too.
12373 For MAXLEN only allow optimizing into non-_ocs function
12374 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12375 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12376 return NULL_TREE;
12378 else
12379 maxlen = len;
12381 if (tree_int_cst_lt (size, maxlen))
12382 return NULL_TREE;
12385 if (!init_target_chars ())
12386 return NULL_TREE;
12388 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12389 or if format doesn't contain % chars or is "%s". */
12390 if (! integer_zerop (flag))
12392 fmt_str = c_getstr (fmt);
12393 if (fmt_str == NULL)
12394 return NULL_TREE;
12395 if (strchr (fmt_str, target_percent) != NULL
12396 && strcmp (fmt_str, target_percent_s))
12397 return NULL_TREE;
12400 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12401 available. */
12402 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12403 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12404 if (!fn)
12405 return NULL_TREE;
12407 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12410 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12411 FMT and ARG are the arguments to the call; we don't fold cases with
12412 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12414 Return NULL_TREE if no simplification was possible, otherwise return the
12415 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12416 code of the function to be simplified. */
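/* Illustrative example (editor's addition): when the return value is unused,
   roughly,

     printf ("")         ->  call deleted
     printf ("x")        ->  putchar ('x')
     printf ("hello\n")  ->  puts ("hello")
     printf ("%s\n", s)  ->  puts (s)
     printf ("%c", c)    ->  putchar (c)

   Other formats are left to the library call.  */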
12418 static tree
12419 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12420 enum built_in_function fcode)
12422 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12423 const char *fmt_str = NULL;
12425 /* If the return value is used, don't do the transformation. */
12426 if (! ignore)
12427 return NULL_TREE;
12429 /* Verify the required arguments in the original call. */
12430 if (!validate_arg (fmt, POINTER_TYPE))
12431 return NULL_TREE;
12433 /* Check whether the format is a literal string constant. */
12434 fmt_str = c_getstr (fmt);
12435 if (fmt_str == NULL)
12436 return NULL_TREE;
12438 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12440 /* If we're using an unlocked function, assume the other
12441 unlocked functions exist explicitly. */
12442 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12443 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12445 else
12447 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12448 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12451 if (!init_target_chars ())
12452 return NULL_TREE;
12454 if (strcmp (fmt_str, target_percent_s) == 0
12455 || strchr (fmt_str, target_percent) == NULL)
12457 const char *str;
12459 if (strcmp (fmt_str, target_percent_s) == 0)
12461 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12462 return NULL_TREE;
12464 if (!arg || !validate_arg (arg, POINTER_TYPE))
12465 return NULL_TREE;
12467 str = c_getstr (arg);
12468 if (str == NULL)
12469 return NULL_TREE;
12471 else
12473 /* The format specifier doesn't contain any '%' characters. */
12474 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12475 && arg)
12476 return NULL_TREE;
12477 str = fmt_str;
12480 /* If the string was "", printf does nothing. */
12481 if (str[0] == '\0')
12482 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12484 /* If the string has length of 1, call putchar. */
12485 if (str[1] == '\0')
12487 /* Given printf ("c") (where c is any single character),
12488 convert "c"[0] to an int and pass that to the replacement
12489 function. */
12490 newarg = build_int_cst (NULL_TREE, str[0]);
12491 if (fn_putchar)
12492 call = build_call_expr (fn_putchar, 1, newarg);
12494 else
12496 /* If the string was "string\n", call puts("string"). */
12497 size_t len = strlen (str);
12498 if ((unsigned char)str[len - 1] == target_newline)
12500 /* Create a NUL-terminated string that's one char shorter
12501 than the original, stripping off the trailing '\n'. */
12502 char *newstr = alloca (len);
12503 memcpy (newstr, str, len - 1);
12504 newstr[len - 1] = 0;
12506 newarg = build_string_literal (len, newstr);
12507 if (fn_puts)
12508 call = build_call_expr (fn_puts, 1, newarg);
12510 else
12511 /* We'd like to arrange to call fputs(string,stdout) here,
12512 but we need stdout and don't have a way to get it yet. */
12513 return NULL_TREE;
12517 /* The other optimizations can be done only on the non-va_list variants. */
12518 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12519 return NULL_TREE;
12521 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12522 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12524 if (!arg || !validate_arg (arg, POINTER_TYPE))
12525 return NULL_TREE;
12526 if (fn_puts)
12527 call = build_call_expr (fn_puts, 1, arg);
12530 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12531 else if (strcmp (fmt_str, target_percent_c) == 0)
12533 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12534 return NULL_TREE;
12535 if (fn_putchar)
12536 call = build_call_expr (fn_putchar, 1, arg);
12539 if (!call)
12540 return NULL_TREE;
12542 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12545 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12546 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12547 more than 3 arguments, and ARG may be null in the 2-argument case.
12549 Return NULL_TREE if no simplification was possible, otherwise return the
12550 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12551 code of the function to be simplified. */
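/* Illustrative example (editor's addition): when the return value is unused,
   roughly,

     fprintf (fp, "hello")  ->  fputs ("hello", fp)
     fprintf (fp, "%s", s)  ->  fputs (s, fp)
     fprintf (fp, "%c", c)  ->  fputc (c, fp)

   Formats with other % directives keep the fprintf call.  */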
12553 static tree
12554 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12555 enum built_in_function fcode)
12557 tree fn_fputc, fn_fputs, call = NULL_TREE;
12558 const char *fmt_str = NULL;
12560 /* If the return value is used, don't do the transformation. */
12561 if (! ignore)
12562 return NULL_TREE;
12564 /* Verify the required arguments in the original call. */
12565 if (!validate_arg (fp, POINTER_TYPE))
12566 return NULL_TREE;
12567 if (!validate_arg (fmt, POINTER_TYPE))
12568 return NULL_TREE;
12570 /* Check whether the format is a literal string constant. */
12571 fmt_str = c_getstr (fmt);
12572 if (fmt_str == NULL)
12573 return NULL_TREE;
12575 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12577 /* If we're using an unlocked function, assume the other
12578 unlocked functions exist explicitly. */
12579 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12580 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12582 else
12584 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12585 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12588 if (!init_target_chars ())
12589 return NULL_TREE;
12591 /* If the format doesn't contain % args or %%, use fputs. */
12592 if (strchr (fmt_str, target_percent) == NULL)
12594 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12595 && arg)
12596 return NULL_TREE;
12598 /* If the format specifier was "", fprintf does nothing. */
12599 if (fmt_str[0] == '\0')
12601 /* If FP has side-effects, just wait until gimplification is
12602 done. */
12603 if (TREE_SIDE_EFFECTS (fp))
12604 return NULL_TREE;
12606 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12609 /* When "string" doesn't contain %, replace all cases of
12610 fprintf (fp, string) with fputs (string, fp). The fputs
12611 builtin will take care of special cases like length == 1. */
12612 if (fn_fputs)
12613 call = build_call_expr (fn_fputs, 2, fmt, fp);
12616 /* The other optimizations can be done only on the non-va_list variants. */
12617 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12618 return NULL_TREE;
12620 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12621 else if (strcmp (fmt_str, target_percent_s) == 0)
12623 if (!arg || !validate_arg (arg, POINTER_TYPE))
12624 return NULL_TREE;
12625 if (fn_fputs)
12626 call = build_call_expr (fn_fputs, 2, arg, fp);
12629 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12630 else if (strcmp (fmt_str, target_percent_c) == 0)
12632 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12633 return NULL_TREE;
12634 if (fn_fputc)
12635 call = build_call_expr (fn_fputc, 2, arg, fp);
12638 if (!call)
12639 return NULL_TREE;
12640 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
12643 /* Initialize format string characters in the target charset. */
12645 static bool
12646 init_target_chars (void)
12648 static bool init;
12649 if (!init)
12651 target_newline = lang_hooks.to_target_charset ('\n');
12652 target_percent = lang_hooks.to_target_charset ('%');
12653 target_c = lang_hooks.to_target_charset ('c');
12654 target_s = lang_hooks.to_target_charset ('s');
12655 if (target_newline == 0 || target_percent == 0 || target_c == 0
12656 || target_s == 0)
12657 return false;
12659 target_percent_c[0] = target_percent;
12660 target_percent_c[1] = target_c;
12661 target_percent_c[2] = '\0';
12663 target_percent_s[0] = target_percent;
12664 target_percent_s[1] = target_s;
12665 target_percent_s[2] = '\0';
12667 target_percent_s_newline[0] = target_percent;
12668 target_percent_s_newline[1] = target_s;
12669 target_percent_s_newline[2] = target_newline;
12670 target_percent_s_newline[3] = '\0';
12672 init = true;
12674 return true;
12677 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12678 and no overflow/underflow occurred. INEXACT is true if M was not
12679 exactly calculated. TYPE is the tree type for the result. This
12680 function assumes that you cleared the MPFR flags and then
12681 calculated M to see if anything subsequently set a flag prior to
12682 entering this function. Return NULL_TREE if any checks fail. */
12684 static tree
12685 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12687 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12688 overflow/underflow occurred. If -frounding-math, proceed iff the
12689 result of calling FUNC was exact. */
12690 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12691 && (!flag_rounding_math || !inexact))
12693 REAL_VALUE_TYPE rr;
12695 real_from_mpfr (&rr, m, type, GMP_RNDN);
12696 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12697 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12698 but the mpfr_t is not, then we underflowed in the
12699 conversion. */
12700 if (real_isfinite (&rr)
12701 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12703 REAL_VALUE_TYPE rmode;
12705 real_convert (&rmode, TYPE_MODE (type), &rr);
12706 /* Proceed iff the specified mode can hold the value. */
12707 if (real_identical (&rmode, &rr))
12708 return build_real (type, rmode);
12711 return NULL_TREE;
12714 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12715 FUNC on it and return the resulting value as a tree with type TYPE.
12716 If MIN and/or MAX are not NULL, then the supplied ARG must be
12717 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12718 acceptable values, otherwise they are not. The mpfr precision is
12719 set to the precision of TYPE. We assume that function FUNC returns
12720 zero if the result could be calculated exactly within the requested
12721 precision. */
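/* Illustrative example (editor's addition): called with ARG == 1.0, TYPE ==
   double and FUNC == mpfr_sin, this evaluates sin (1.0) in MPFR at the
   53-bit precision of IEEE double and, if do_mpfr_ckconv accepts the result,
   folds the call to the constant 0.8414709848078965...  */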
12723 static tree
12724 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12725 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12726 bool inclusive)
12728 tree result = NULL_TREE;
12730 STRIP_NOPS (arg);
12732 /* To proceed, MPFR must exactly represent the target floating point
12733 format, which only happens when the target base equals two. */
12734 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12735 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12737 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12739 if (real_isfinite (ra)
12740 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
12741 && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
12743 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12744 int inexact;
12745 mpfr_t m;
12747 mpfr_init2 (m, prec);
12748 mpfr_from_real (m, ra, GMP_RNDN);
12749 mpfr_clear_flags ();
12750 inexact = func (m, m, GMP_RNDN);
12751 result = do_mpfr_ckconv (m, type, inexact);
12752 mpfr_clear (m);
12756 return result;
12759 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12760 FUNC on it and return the resulting value as a tree with type TYPE.
12761 The mpfr precision is set to the precision of TYPE. We assume that
12762 function FUNC returns zero if the result could be calculated
12763 exactly within the requested precision. */
12765 static tree
12766 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12767 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12769 tree result = NULL_TREE;
12771 STRIP_NOPS (arg1);
12772 STRIP_NOPS (arg2);
12774 /* To proceed, MPFR must exactly represent the target floating point
12775 format, which only happens when the target base equals two. */
12776 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12777 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12778 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12780 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12781 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12783 if (real_isfinite (ra1) && real_isfinite (ra2))
12785 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12786 int inexact;
12787 mpfr_t m1, m2;
12789 mpfr_inits2 (prec, m1, m2, NULL);
12790 mpfr_from_real (m1, ra1, GMP_RNDN);
12791 mpfr_from_real (m2, ra2, GMP_RNDN);
12792 mpfr_clear_flags ();
12793 inexact = func (m1, m1, m2, GMP_RNDN);
12794 result = do_mpfr_ckconv (m1, type, inexact);
12795 mpfr_clears (m1, m2, NULL);
12799 return result;
12802 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12803 FUNC on it and return the resulting value as a tree with type TYPE.
12804 The mpfr precision is set to the precision of TYPE. We assume that
12805 function FUNC returns zero if the result could be calculated
12806 exactly within the requested precision. */
12808 static tree
12809 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12810 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12812 tree result = NULL_TREE;
12814 STRIP_NOPS (arg1);
12815 STRIP_NOPS (arg2);
12816 STRIP_NOPS (arg3);
12818 /* To proceed, MPFR must exactly represent the target floating point
12819 format, which only happens when the target base equals two. */
12820 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12821 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12822 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12823 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12825 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12826 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12827 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12829 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12831 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12832 int inexact;
12833 mpfr_t m1, m2, m3;
12835 mpfr_inits2 (prec, m1, m2, m3, NULL);
12836 mpfr_from_real (m1, ra1, GMP_RNDN);
12837 mpfr_from_real (m2, ra2, GMP_RNDN);
12838 mpfr_from_real (m3, ra3, GMP_RNDN);
12839 mpfr_clear_flags ();
12840 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12841 result = do_mpfr_ckconv (m1, type, inexact);
12842 mpfr_clears (m1, m2, m3, NULL);
12846 return result;
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (real_isfinite (ra))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          tree result_s, result_c;
          int inexact;
          mpfr_t m, ms, mc;

          mpfr_inits2 (prec, m, ms, mc, NULL);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
          result_s = do_mpfr_ckconv (ms, type, inexact);
          result_c = do_mpfr_ckconv (mc, type, inexact);
          mpfr_clears (m, ms, mc, NULL);
          if (result_s && result_c)
            {
              /* If we are to return in a complex value, do so.  */
              if (!arg_sinp && !arg_cosp)
                return build_complex (build_complex_type (type),
                                      result_c, result_s);

              /* Dereference the sin/cos pointer arguments.  */
              arg_sinp = build_fold_indirect_ref (arg_sinp);
              arg_cosp = build_fold_indirect_ref (arg_cosp);
              /* Proceed iff valid pointer types were passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp))
                    == TYPE_MAIN_VARIANT (type)
                  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp))
                    == TYPE_MAIN_VARIANT (type))
                {
                  /* Set the values.  */
                  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
                                          result_s);
                  TREE_SIDE_EFFECTS (result_s) = 1;
                  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
                                          result_c);
                  TREE_SIDE_EFFECTS (result_c) = 1;
                  /* Combine the assignments into a compound expr.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_s, result_c));
                }
            }
        }
    }
  return result;
}
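/* Two usage patterns are expected for this helper (the concrete call
   sites live earlier in this file and are paraphrased here as a sketch):

       // Folding cexpi (x): no pointers, so a COMPLEX_CST comes back
       // with cos(x) as the real part and sin(x) as the imaginary part.
       tree folded = do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);

       // Folding sincos (x, &s, &c): the sin/cos pointer arguments are
       // passed through and the result is a compound assignment expr.
       tree folded2 = do_mpfr_sincos (arg0, arg_sin_ptr, arg_cos_ptr);

   The names arg0, arg_sin_ptr and arg_cos_ptr are placeholders for the
   stripped call arguments, not identifiers used elsewhere.  */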
#if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
   two-argument mpfr order N Bessel function FUNC on them and return
   the resulting value as a tree with type TYPE.  The mpfr precision
   is set to the precision of TYPE.  We assume that function FUNC
   returns zero if the result could be calculated exactly within the
   requested precision.  */
static tree
do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
                  int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                  const REAL_VALUE_TYPE *min, bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && host_integerp (arg1, 0)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);

      if (n == (long) n
          && real_isfinite (ra)
          && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact;
          mpfr_t m;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = func (m, n, m, GMP_RNDN);
          result = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
        }
    }

  return result;
}
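/* MIN and INCLUSIVE let callers restrict the domain.  As an illustrative
   sketch (the real dispatch is earlier in this file), jn accepts any
   finite argument while yn is only folded for x > 0, which can be
   expressed by passing dconst0 as an exclusive lower bound:

       // j0/j1/jn family: no domain restriction.
       tree folded_jn = do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn,
                                          NULL, false);
       // y0/y1/yn family: require arg1 > 0.
       tree folded_yn = do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
                                          &dconst0, false);

   mpfr_jn and mpfr_yn match the (mpfr_ptr, long, mpfr_srcptr, mp_rnd_t)
   function-pointer type expected here.  */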
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long, so it may return
                 more bits in quo than the target int can hold when
                 sizeof (host long) > sizeof (target int).  This can
                 happen even for native compilers in LP64 mode.  In that
                 case, reduce the quo value modulo the largest number
                 that the target int can hold while leaving one bit for
                 the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long) (1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo = fold_build2 (MODIFY_EXPR,
                                                 TREE_TYPE (arg_quo), arg_quo,
                                                 build_int_cst (NULL, integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
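/* A hedged sketch of the intended use when folding a constant call to
   remquo (the actual call site appears earlier in this file):

       // Fold remquo (x, y, &q) with constant x and y.  On success the
       // returned tree both stores the quotient bits through the int
       // pointer and yields the remainder as its value.
       tree folded = do_mpfr_remquo (arg0, arg1, arg2);
       if (folded)
         return folded;

   arg2 must be a pointer to int; anything else makes the check against
   integer_type_node fail and NULL_TREE is returned.  */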
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (NULL, sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
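/* Illustrative only (the real caller sits earlier in this file): this
   helper backs the folding of lgamma_r, whose second argument is the
   user's signgam pointer:

       // Fold lgamma_r (x, &sg) for constant x; on success the result
       // also assigns -1 or 1 through the int pointer.
       tree folded = do_mpfr_lgamma_r (arg0, arg1, type);
       if (folded)
         return folded;
*/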
#endif