gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "tree-gimple.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "output.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "predict.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
56 #endif
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names[(int) END_BUILTINS] =
64 {
65 #include "builtins.def"
66 };
67 #undef DEF_BUILTIN
69 /* Set up an array of _DECL trees, and make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls[(int) END_BUILTINS];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance the runtime is not
74 required to implement the function call in all cases).  */
75 tree implicit_built_in_decls[(int) END_BUILTINS];
77 static const char *c_getstr (tree);
78 static rtx c_readstr (const char *, enum machine_mode);
79 static int target_char_cast (tree, char *);
80 static rtx get_memory_rtx (tree, tree);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx result_vector (int, rtx);
85 #endif
86 static void expand_builtin_update_setjmp_buf (rtx);
87 static void expand_builtin_prefetch (tree);
88 static rtx expand_builtin_apply_args (void);
89 static rtx expand_builtin_apply_args_1 (void);
90 static rtx expand_builtin_apply (rtx, rtx, rtx);
91 static void expand_builtin_return (rtx);
92 static enum type_class type_to_class (tree);
93 static rtx expand_builtin_classify_type (tree);
94 static void expand_errno_check (tree, rtx);
95 static rtx expand_builtin_mathfn (tree, rtx, rtx);
96 static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
97 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
98 static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
99 static rtx expand_builtin_sincos (tree);
100 static rtx expand_builtin_cexpi (tree, rtx, rtx);
101 static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
102 static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
103 static rtx expand_builtin_args_info (tree);
104 static rtx expand_builtin_next_arg (void);
105 static rtx expand_builtin_va_start (tree);
106 static rtx expand_builtin_va_end (tree);
107 static rtx expand_builtin_va_copy (tree);
108 static rtx expand_builtin_memchr (tree, rtx, enum machine_mode);
109 static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
110 static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
111 static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
112 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
113 static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
114 static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
115 static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
116 static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
117 static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
118 static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
119 static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
120 enum machine_mode, int);
121 static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
122 static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
123 enum machine_mode, int);
124 static rtx expand_builtin_bcopy (tree, int);
125 static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
126 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
127 static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
131 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
132 static rtx expand_builtin_bzero (tree);
133 static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
134 static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
135 static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
136 static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
137 static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
138 static rtx expand_builtin_alloca (tree, rtx);
139 static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static rtx expand_builtin_fputs (tree, rtx, bool);
142 static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
143 static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
144 static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
145 static tree stabilize_va_list (tree, int);
146 static rtx expand_builtin_expect (tree, rtx);
147 static tree fold_builtin_constant_p (tree);
148 static tree fold_builtin_expect (tree, tree);
149 static tree fold_builtin_classify_type (tree);
150 static tree fold_builtin_strlen (tree);
151 static tree fold_builtin_inf (tree, int);
152 static tree fold_builtin_nan (tree, tree, int);
153 static tree rewrite_call_expr (tree, int, tree, int, ...);
154 static bool validate_arg (const_tree, enum tree_code code);
155 static bool integer_valued_real_p (tree);
156 static tree fold_trunc_transparent_mathfn (tree, tree);
157 static bool readonly_data_expr (tree);
158 static rtx expand_builtin_fabs (tree, rtx, rtx);
159 static rtx expand_builtin_signbit (tree, rtx);
160 static tree fold_builtin_sqrt (tree, tree);
161 static tree fold_builtin_cbrt (tree, tree);
162 static tree fold_builtin_pow (tree, tree, tree, tree);
163 static tree fold_builtin_powi (tree, tree, tree, tree);
164 static tree fold_builtin_cos (tree, tree, tree);
165 static tree fold_builtin_cosh (tree, tree, tree);
166 static tree fold_builtin_tan (tree, tree);
167 static tree fold_builtin_trunc (tree, tree);
168 static tree fold_builtin_floor (tree, tree);
169 static tree fold_builtin_ceil (tree, tree);
170 static tree fold_builtin_round (tree, tree);
171 static tree fold_builtin_int_roundingfn (tree, tree);
172 static tree fold_builtin_bitop (tree, tree);
173 static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
174 static tree fold_builtin_strchr (tree, tree, tree);
175 static tree fold_builtin_memchr (tree, tree, tree, tree);
176 static tree fold_builtin_memcmp (tree, tree, tree);
177 static tree fold_builtin_strcmp (tree, tree);
178 static tree fold_builtin_strncmp (tree, tree, tree);
179 static tree fold_builtin_signbit (tree, tree);
180 static tree fold_builtin_copysign (tree, tree, tree, tree);
181 static tree fold_builtin_isascii (tree);
182 static tree fold_builtin_toascii (tree);
183 static tree fold_builtin_isdigit (tree);
184 static tree fold_builtin_fabs (tree, tree);
185 static tree fold_builtin_abs (tree, tree);
186 static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
187 enum tree_code);
188 static tree fold_builtin_n (tree, tree *, int, bool);
189 static tree fold_builtin_0 (tree, bool);
190 static tree fold_builtin_1 (tree, tree, bool);
191 static tree fold_builtin_2 (tree, tree, tree, bool);
192 static tree fold_builtin_3 (tree, tree, tree, tree, bool);
193 static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
194 static tree fold_builtin_varargs (tree, tree, bool);
196 static tree fold_builtin_strpbrk (tree, tree, tree);
197 static tree fold_builtin_strstr (tree, tree, tree);
198 static tree fold_builtin_strrchr (tree, tree, tree);
199 static tree fold_builtin_strcat (tree, tree);
200 static tree fold_builtin_strncat (tree, tree, tree);
201 static tree fold_builtin_strspn (tree, tree);
202 static tree fold_builtin_strcspn (tree, tree);
203 static tree fold_builtin_sprintf (tree, tree, tree, int);
205 static rtx expand_builtin_object_size (tree);
206 static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
207 enum built_in_function);
208 static void maybe_emit_chk_warning (tree, enum built_in_function);
209 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
210 static tree fold_builtin_object_size (tree, tree);
211 static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
212 static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
213 static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
214 static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
215 static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
216 enum built_in_function);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline;
220 static unsigned HOST_WIDE_INT target_percent;
221 static unsigned HOST_WIDE_INT target_c;
222 static unsigned HOST_WIDE_INT target_s;
223 static char target_percent_c[3];
224 static char target_percent_s[3];
225 static char target_percent_s_newline[4];
226 static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
227 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
228 static tree do_mpfr_arg2 (tree, tree, tree,
229 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
230 static tree do_mpfr_arg3 (tree, tree, tree, tree,
231 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
232 static tree do_mpfr_sincos (tree, tree, tree);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree do_mpfr_bessel_n (tree, tree, tree,
235 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
236 const REAL_VALUE_TYPE *, bool);
237 static tree do_mpfr_remquo (tree, tree, tree);
238 static tree do_mpfr_lgamma_r (tree, tree, tree);
239 #endif
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node)
247 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
248 if (strncmp (name, "__builtin_", 10) == 0)
249 return true;
250 if (strncmp (name, "__sync_", 7) == 0)
251 return true;
252 return false;
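/* An illustrative caller-side sketch (hypothetical user code, assuming the
   usual C library declaration of memcpy) of the distinction drawn above:
   the first call is spelled with the internal name and is therefore
   considered for expansion here even when not optimizing; the second is
   generally left as a library call unless optimization is enabled.

       #include <string.h>

       void copy_a (char *d, const char *s) { __builtin_memcpy (d, s, 4); }
       void copy_b (char *d, const char *s) { memcpy (d, s, 4); }  */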
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
263 unsigned int
264 get_pointer_alignment (tree exp, unsigned int max_align)
266 unsigned int align, inner;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize && flag_tree_ter))
270 return 0;
272 if (!POINTER_TYPE_P (TREE_TYPE (exp)))
273 return 0;
275 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
276 align = MIN (align, max_align);
278 while (1)
280 switch (TREE_CODE (exp))
282 CASE_CONVERT:
283 exp = TREE_OPERAND (exp, 0);
284 if (! POINTER_TYPE_P (TREE_TYPE (exp)))
285 return align;
287 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
288 align = MIN (inner, max_align);
289 break;
291 case POINTER_PLUS_EXPR:
292 /* If sum of pointer + int, restrict our maximum alignment to that
293 imposed by the integer. If not, we can't do any better than
294 ALIGN. */
295 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
296 return align;
298 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
299 & (max_align / BITS_PER_UNIT - 1))
300 != 0)
301 max_align >>= 1;
303 exp = TREE_OPERAND (exp, 0);
304 break;
306 case ADDR_EXPR:
307 /* See what we are pointing at and look at its alignment. */
308 exp = TREE_OPERAND (exp, 0);
309 inner = max_align;
310 if (handled_component_p (exp))
312 HOST_WIDE_INT bitsize, bitpos;
313 tree offset;
314 enum machine_mode mode;
315 int unsignedp, volatilep;
317 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
318 &mode, &unsignedp, &volatilep, true);
319 if (bitpos)
320 inner = MIN (inner, (unsigned) (bitpos & -bitpos));
321 if (offset && TREE_CODE (offset) == PLUS_EXPR
322 && host_integerp (TREE_OPERAND (offset, 1), 1))
324 /* Any overflow in calculating offset_bits won't change
325 the alignment. */
326 unsigned offset_bits
327 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
328 * BITS_PER_UNIT);
330 if (offset_bits)
331 inner = MIN (inner, (offset_bits & -offset_bits));
332 offset = TREE_OPERAND (offset, 0);
334 if (offset && TREE_CODE (offset) == MULT_EXPR
335 && host_integerp (TREE_OPERAND (offset, 1), 1))
337 /* Any overflow in calculating offset_factor won't change
338 the alignment. */
339 unsigned offset_factor
340 = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
341 * BITS_PER_UNIT);
343 if (offset_factor)
344 inner = MIN (inner, (offset_factor & -offset_factor));
346 else if (offset)
347 inner = MIN (inner, BITS_PER_UNIT);
349 if (DECL_P (exp))
350 align = MIN (inner, DECL_ALIGN (exp));
351 #ifdef CONSTANT_ALIGNMENT
352 else if (CONSTANT_CLASS_P (exp))
353 align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
354 #endif
355 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
356 || TREE_CODE (exp) == INDIRECT_REF)
357 align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
358 else
359 align = MIN (align, inner);
360 return MIN (align, max_align);
362 default:
363 return align;
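/* An illustrative sketch (hypothetical user code) of an expression the walk
   above handles: for P, the ADDR_EXPR case finds the 16-byte-aligned
   declaration, so the result is MIN (128, MAX_ALIGN) bits rather than the
   8 bits suggested by the char * type.  (When not optimizing, the early
   return above yields 0 instead.)

       static int buf[4] __attribute__ ((aligned (16)));
       static char *p = (char *) buf;  */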
368 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
369 way, because it could contain a zero byte in the middle.
370 TREE_STRING_LENGTH is the size of the character array, not the string.
372 ONLY_VALUE should be nonzero if the result is not going to be emitted
373 into the instruction stream and zero if it is going to be expanded.
374 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
375 is returned, otherwise NULL, since
376 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
377 evaluate the side-effects.
379 The value returned is of type `ssizetype'.
381 Unfortunately, string_constant can't access the values of const char
382 arrays with initializers, so neither can we do so here. */
384 tree
385 c_strlen (tree src, int only_value)
387 tree offset_node;
388 HOST_WIDE_INT offset;
389 int max;
390 const char *ptr;
392 STRIP_NOPS (src);
393 if (TREE_CODE (src) == COND_EXPR
394 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
396 tree len1, len2;
398 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
399 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
400 if (tree_int_cst_equal (len1, len2))
401 return len1;
404 if (TREE_CODE (src) == COMPOUND_EXPR
405 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
406 return c_strlen (TREE_OPERAND (src, 1), only_value);
408 src = string_constant (src, &offset_node);
409 if (src == 0)
410 return NULL_TREE;
412 max = TREE_STRING_LENGTH (src) - 1;
413 ptr = TREE_STRING_POINTER (src);
415 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
417 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
418 compute the offset to the following null if we don't know where to
419 start searching for it. */
420 int i;
422 for (i = 0; i < max; i++)
423 if (ptr[i] == 0)
424 return NULL_TREE;
426 /* We don't know the starting offset, but we do know that the string
427 has no internal zero bytes. We can assume that the offset falls
428 within the bounds of the string; otherwise, the programmer deserves
429 what he gets. Subtract the offset from the length of the string,
430 and return that. This would perhaps not be valid if we were dealing
431 with named arrays in addition to literal string constants. */
433 return size_diffop (size_int (max), offset_node);
436 /* We have a known offset into the string. Start searching there for
437 a null character if we can represent it as a single HOST_WIDE_INT. */
438 if (offset_node == 0)
439 offset = 0;
440 else if (! host_integerp (offset_node, 0))
441 offset = -1;
442 else
443 offset = tree_low_cst (offset_node, 0);
445 /* If the offset is known to be out of bounds, warn, and call strlen at
446 runtime. */
447 if (offset < 0 || offset > max)
449 /* Suppress multiple warnings for propagated constant strings. */
450 if (! TREE_NO_WARNING (src))
452 warning (0, "offset outside bounds of constant string");
453 TREE_NO_WARNING (src) = 1;
455 return NULL_TREE;
458 /* Use strlen to search for the first zero byte. Since any strings
459 constructed with build_string will have nulls appended, we win even
460 if we get handed something like (char[4])"abcd".
462 Since OFFSET is our starting index into the string, no further
463 calculation is needed. */
464 return ssize_int (strlen (ptr + offset));
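/* A small sketch (hypothetical user code) of the folding this enables:

       unsigned long a = __builtin_strlen ("foobar");
       unsigned long b = __builtin_strlen ("foobar" + 2);

   The first reduces to 6; the second reduces to 4 through the known-offset
   path above, so no strlen call is emitted for either.  */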
467 /* Return a char pointer for a C string if it is a string constant
468 or sum of string constant and integer constant. */
470 static const char *
471 c_getstr (tree src)
473 tree offset_node;
475 src = string_constant (src, &offset_node);
476 if (src == 0)
477 return 0;
479 if (offset_node == 0)
480 return TREE_STRING_POINTER (src);
481 else if (!host_integerp (offset_node, 1)
482 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
483 return 0;
485 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
488 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
489 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
491 static rtx
492 c_readstr (const char *str, enum machine_mode mode)
494 HOST_WIDE_INT c[2];
495 HOST_WIDE_INT ch;
496 unsigned int i, j;
498 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
500 c[0] = 0;
501 c[1] = 0;
502 ch = 1;
503 for (i = 0; i < GET_MODE_SIZE (mode); i++)
505 j = i;
506 if (WORDS_BIG_ENDIAN)
507 j = GET_MODE_SIZE (mode) - i - 1;
508 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
509 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
510 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
511 j *= BITS_PER_UNIT;
512 gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
514 if (ch)
515 ch = (unsigned char) str[i];
516 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
518 return immed_double_const (c[0], c[1], mode);
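/* A worked example, assuming 8-bit units and a 4-byte integer mode: reading
   "abcd" produces the CONST_INT whose lowest-addressed byte is 'a', i.e.
   0x61626364 on a big-endian target and 0x64636261 on a little-endian one,
   matching what a 4-byte load of the string itself would yield.  */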
521 /* Cast a target constant CST to target CHAR and if that value fits into
522 host char type, return zero and put that value into variable pointed to by
523 P. */
525 static int
526 target_char_cast (tree cst, char *p)
528 unsigned HOST_WIDE_INT val, hostval;
530 if (!host_integerp (cst, 1)
531 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
532 return 1;
534 val = tree_low_cst (cst, 1);
535 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
536 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
538 hostval = val;
539 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
540 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
542 if (val != hostval)
543 return 1;
545 *p = hostval;
546 return 0;
549 /* Similar to save_expr, but assumes that arbitrary code is not executed
550 in between the multiple evaluations. In particular, we assume that a
551 non-addressable local variable will not be modified. */
553 static tree
554 builtin_save_expr (tree exp)
556 if (TREE_ADDRESSABLE (exp) == 0
557 && (TREE_CODE (exp) == PARM_DECL
558 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
559 return exp;
561 return save_expr (exp);
564 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
565 times to get the address of either a higher stack frame, or a return
566 address located within it (depending on FNDECL_CODE). */
568 static rtx
569 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
571 int i;
573 #ifdef INITIAL_FRAME_ADDRESS_RTX
574 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
575 #else
576 rtx tem;
578 /* For a zero count with __builtin_return_address, we don't care what
579 frame address we return, because target-specific definitions will
580 override us. Therefore frame pointer elimination is OK, and using
581 the soft frame pointer is OK.
583 For a nonzero count, or a zero count with __builtin_frame_address,
584 we require a stable offset from the current frame pointer to the
585 previous one, so we must use the hard frame pointer, and
586 we must disable frame pointer elimination. */
587 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
588 tem = frame_pointer_rtx;
589 else
591 tem = hard_frame_pointer_rtx;
593 /* Tell reload not to eliminate the frame pointer. */
594 crtl->accesses_prior_frames = 1;
596 #endif
598 /* Some machines need special handling before we can access
599 arbitrary frames. For example, on the SPARC, we must first flush
600 all register windows to the stack. */
601 #ifdef SETUP_FRAME_ADDRESSES
602 if (count > 0)
603 SETUP_FRAME_ADDRESSES ();
604 #endif
606 /* On the SPARC, the return address is not in the frame, it is in a
607 register. There is no way to access it off of the current frame
608 pointer, but it can be accessed off the previous frame pointer by
609 reading the value from the register window save area. */
610 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
611 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
612 count--;
613 #endif
615 /* Scan back COUNT frames to the specified frame. */
616 for (i = 0; i < count; i++)
618 /* Assume the dynamic chain pointer is in the word that the
619 frame address points to, unless otherwise specified. */
620 #ifdef DYNAMIC_CHAIN_ADDRESS
621 tem = DYNAMIC_CHAIN_ADDRESS (tem);
622 #endif
623 tem = memory_address (Pmode, tem);
624 tem = gen_frame_mem (Pmode, tem);
625 tem = copy_to_reg (tem);
628 /* For __builtin_frame_address, return what we've got. But, on
629 the SPARC for example, we may have to add a bias. */
630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
631 #ifdef FRAME_ADDR_RTX
632 return FRAME_ADDR_RTX (tem);
633 #else
634 return tem;
635 #endif
637 /* For __builtin_return_address, get the return address from that frame. */
638 #ifdef RETURN_ADDR_RTX
639 tem = RETURN_ADDR_RTX (count, tem);
640 #else
641 tem = memory_address (Pmode,
642 plus_constant (tem, GET_MODE_SIZE (Pmode)));
643 tem = gen_frame_mem (Pmode, tem);
644 #endif
645 return tem;
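/* An illustrative sketch (hypothetical user code) of the two builtins that
   funnel into the routine above; a nonzero count, or any use of
   __builtin_frame_address, forces use of the hard frame pointer as
   described in the comments:

       void *caller_pc (void)  { return __builtin_return_address (0); }
       void *this_frame (void) { return __builtin_frame_address (0); }  */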
648 /* Alias set used for setjmp buffer. */
649 static alias_set_type setjmp_alias_set = -1;
651 /* Construct the leading half of a __builtin_setjmp call. Control will
652 return to RECEIVER_LABEL. This is also called directly by the SJLJ
653 exception handling code. */
655 void
656 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
658 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
659 rtx stack_save;
660 rtx mem;
662 if (setjmp_alias_set == -1)
663 setjmp_alias_set = new_alias_set ();
665 buf_addr = convert_memory_address (Pmode, buf_addr);
667 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
669 /* We store the frame pointer and the address of receiver_label in
670 the buffer and use the rest of it for the stack save area, which
671 is machine-dependent. */
673 mem = gen_rtx_MEM (Pmode, buf_addr);
674 set_mem_alias_set (mem, setjmp_alias_set);
675 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
677 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
678 set_mem_alias_set (mem, setjmp_alias_set);
680 emit_move_insn (validize_mem (mem),
681 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
683 stack_save = gen_rtx_MEM (sa_mode,
684 plus_constant (buf_addr,
685 2 * GET_MODE_SIZE (Pmode)));
686 set_mem_alias_set (stack_save, setjmp_alias_set);
687 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
689 /* If there is further processing to do, do it. */
690 #ifdef HAVE_builtin_setjmp_setup
691 if (HAVE_builtin_setjmp_setup)
692 emit_insn (gen_builtin_setjmp_setup (buf_addr));
693 #endif
695 /* Tell optimize_save_area_alloca that extra work is going to
696 need to go on during alloca. */
697 cfun->calls_setjmp = 1;
699 /* We have a nonlocal label. */
700 cfun->has_nonlocal_label = 1;
703 /* Construct the trailing part of a __builtin_setjmp call. This is
704 also called directly by the SJLJ exception handling code. */
706 void
707 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
709 /* Clobber the FP when we get here, so we have to make sure it's
710 marked as used by this function. */
711 emit_use (hard_frame_pointer_rtx);
713 /* Mark the static chain as clobbered here so life information
714 doesn't get messed up for it. */
715 emit_clobber (static_chain_rtx);
717 /* Now put in the code to restore the frame pointer, and argument
718 pointer, if needed. */
719 #ifdef HAVE_nonlocal_goto
720 if (! HAVE_nonlocal_goto)
721 #endif
723 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
724 /* This might change the hard frame pointer in ways that aren't
725 apparent to early optimization passes, so force a clobber. */
726 emit_clobber (hard_frame_pointer_rtx);
729 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
730 if (fixed_regs[ARG_POINTER_REGNUM])
732 #ifdef ELIMINABLE_REGS
733 size_t i;
734 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
736 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
737 if (elim_regs[i].from == ARG_POINTER_REGNUM
738 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
739 break;
741 if (i == ARRAY_SIZE (elim_regs))
742 #endif
744 /* Now restore our arg pointer from the address at which it
745 was saved in our stack frame. */
746 emit_move_insn (virtual_incoming_args_rtx,
747 copy_to_reg (get_arg_pointer_save_area ()));
750 #endif
752 #ifdef HAVE_builtin_setjmp_receiver
753 if (HAVE_builtin_setjmp_receiver)
754 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
755 else
756 #endif
757 #ifdef HAVE_nonlocal_goto_receiver
758 if (HAVE_nonlocal_goto_receiver)
759 emit_insn (gen_nonlocal_goto_receiver ());
760 else
761 #endif
762 { /* Nothing */ }
764 /* We must not allow the code we just generated to be reordered by
765 scheduling. Specifically, the update of the frame pointer must
766 happen immediately, not later. */
767 emit_insn (gen_blockage ());
770 /* __builtin_longjmp is passed a pointer to an array of five words (not
771 all will be used on all machines). It operates similarly to the C
772 library function of the same name, but is more efficient. Much of
773 the code below is copied from the handling of non-local gotos. */
775 static void
776 expand_builtin_longjmp (rtx buf_addr, rtx value)
778 rtx fp, lab, stack, insn, last;
779 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
781 if (setjmp_alias_set == -1)
782 setjmp_alias_set = new_alias_set ();
784 buf_addr = convert_memory_address (Pmode, buf_addr);
786 buf_addr = force_reg (Pmode, buf_addr);
788 /* We used to store value in static_chain_rtx, but that fails if pointers
789 are smaller than integers. We instead require that the user must pass
790 a second argument of 1, because that is what builtin_setjmp will
791 return. This also makes EH slightly more efficient, since we are no
792 longer copying around a value that we don't care about. */
793 gcc_assert (value == const1_rtx);
795 last = get_last_insn ();
796 #ifdef HAVE_builtin_longjmp
797 if (HAVE_builtin_longjmp)
798 emit_insn (gen_builtin_longjmp (buf_addr));
799 else
800 #endif
802 fp = gen_rtx_MEM (Pmode, buf_addr);
803 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
804 GET_MODE_SIZE (Pmode)));
806 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (fp, setjmp_alias_set);
809 set_mem_alias_set (lab, setjmp_alias_set);
810 set_mem_alias_set (stack, setjmp_alias_set);
812 /* Pick up FP, label, and SP from the block and jump. This code is
813 from expand_goto in stmt.c; see there for detailed comments. */
814 #ifdef HAVE_nonlocal_goto
815 if (HAVE_nonlocal_goto)
816 /* We have to pass a value to the nonlocal_goto pattern that will
817 get copied into the static_chain pointer, but it does not matter
818 what that value is, because builtin_setjmp does not use it. */
819 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
820 else
821 #endif
823 lab = copy_to_reg (lab);
825 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
826 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
828 emit_move_insn (hard_frame_pointer_rtx, fp);
829 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
831 emit_use (hard_frame_pointer_rtx);
832 emit_use (stack_pointer_rtx);
833 emit_indirect_jump (lab);
837 /* Search backwards and mark the jump insn as a non-local goto.
838 Note that this precludes the use of __builtin_longjmp to a
839 __builtin_setjmp target in the same function. However, we've
840 already cautioned the user that these functions are for
841 internal exception handling use only. */
842 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
844 gcc_assert (insn != last);
846 if (JUMP_P (insn))
848 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
849 REG_NOTES (insn));
850 break;
852 else if (CALL_P (insn))
853 break;
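/* An illustrative sketch (hypothetical user code) of the matching builtin
   pair; the buffer is the five-word block described above, the jump must
   come from a deeper frame (same-function jumps are precluded, per the
   comment above), and the second argument of __builtin_longjmp must be the
   literal 1, as asserted above.

       static void *jmpbuf[5];

       static void raise_it (void) { __builtin_longjmp (jmpbuf, 1); }

       int demo (void)
       {
         if (__builtin_setjmp (jmpbuf) == 0)
           {
             raise_it ();
             return 0;              not reached
           }
         return 1;                  reached via the longjmp
       }  */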
857 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
858 and the address of the save area. */
860 static rtx
861 expand_builtin_nonlocal_goto (tree exp)
863 tree t_label, t_save_area;
864 rtx r_label, r_save_area, r_fp, r_sp, insn;
866 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
867 return NULL_RTX;
869 t_label = CALL_EXPR_ARG (exp, 0);
870 t_save_area = CALL_EXPR_ARG (exp, 1);
872 r_label = expand_normal (t_label);
873 r_label = convert_memory_address (Pmode, r_label);
874 r_save_area = expand_normal (t_save_area);
875 r_save_area = convert_memory_address (Pmode, r_save_area);
876 /* Copy the address of the save location to a register just in case it was based
877 on the frame pointer. */
878 r_save_area = copy_to_reg (r_save_area);
879 r_fp = gen_rtx_MEM (Pmode, r_save_area);
880 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
881 plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
883 crtl->has_nonlocal_goto = 1;
885 #ifdef HAVE_nonlocal_goto
886 /* ??? We no longer need to pass the static chain value, afaik. */
887 if (HAVE_nonlocal_goto)
888 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
889 else
890 #endif
892 r_label = copy_to_reg (r_label);
894 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
895 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
897 /* Restore frame pointer for containing function.
898 This sets the actual hard register used for the frame pointer
899 to the location of the function's incoming static chain info.
900 The non-local goto handler will then adjust it to contain the
901 proper value and reload the argument pointer, if needed. */
902 emit_move_insn (hard_frame_pointer_rtx, r_fp);
903 emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
905 /* USE of hard_frame_pointer_rtx added for consistency;
906 not clear if really needed. */
907 emit_use (hard_frame_pointer_rtx);
908 emit_use (stack_pointer_rtx);
910 /* If the architecture is using a GP register, we must
911 conservatively assume that the target function makes use of it.
912 The prologue of functions with nonlocal gotos must therefore
913 initialize the GP register to the appropriate value, and we
914 must then make sure that this value is live at the point
915 of the jump. (Note that this doesn't necessarily apply
916 to targets with a nonlocal_goto pattern; they are free
917 to implement it in their own way. Note also that this is
918 a no-op if the GP register is a global invariant.) */
919 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
920 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
921 emit_use (pic_offset_table_rtx);
923 emit_indirect_jump (r_label);
926 /* Search backwards to the jump insn and mark it as a
927 non-local goto. */
928 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
930 if (JUMP_P (insn))
932 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
933 const0_rtx, REG_NOTES (insn));
934 break;
936 else if (CALL_P (insn))
937 break;
940 return const0_rtx;
943 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
944 (not all will be used on all machines) that was passed to __builtin_setjmp.
945 It updates the stack pointer in that block to correspond to the current
946 stack pointer. */
948 static void
949 expand_builtin_update_setjmp_buf (rtx buf_addr)
951 enum machine_mode sa_mode = Pmode;
952 rtx stack_save;
955 #ifdef HAVE_save_stack_nonlocal
956 if (HAVE_save_stack_nonlocal)
957 sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
958 #endif
959 #ifdef STACK_SAVEAREA_MODE
960 sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
961 #endif
963 stack_save
964 = gen_rtx_MEM (sa_mode,
965 memory_address
966 (sa_mode,
967 plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
969 #ifdef HAVE_setjmp
970 if (HAVE_setjmp)
971 emit_insn (gen_setjmp ());
972 #endif
974 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
977 /* Expand a call to __builtin_prefetch. For a target that does not support
978 data prefetch, evaluate the memory address argument in case it has side
979 effects. */
981 static void
982 expand_builtin_prefetch (tree exp)
984 tree arg0, arg1, arg2;
985 int nargs;
986 rtx op0, op1, op2;
988 if (!validate_arglist (exp, POINTER_TYPE, 0))
989 return;
991 arg0 = CALL_EXPR_ARG (exp, 0);
993 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
994 zero (read) and argument 2 (locality) defaults to 3 (high degree of
995 locality). */
996 nargs = call_expr_nargs (exp);
997 if (nargs > 1)
998 arg1 = CALL_EXPR_ARG (exp, 1);
999 else
1000 arg1 = integer_zero_node;
1001 if (nargs > 2)
1002 arg2 = CALL_EXPR_ARG (exp, 2);
1003 else
1004 arg2 = build_int_cst (NULL_TREE, 3);
1006 /* Argument 0 is an address. */
1007 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1009 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1010 if (TREE_CODE (arg1) != INTEGER_CST)
1012 error ("second argument to %<__builtin_prefetch%> must be a constant");
1013 arg1 = integer_zero_node;
1015 op1 = expand_normal (arg1);
1016 /* Argument 1 must be either zero or one. */
1017 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1019 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1020 " using zero");
1021 op1 = const0_rtx;
1024 /* Argument 2 (locality) must be a compile-time constant int. */
1025 if (TREE_CODE (arg2) != INTEGER_CST)
1027 error ("third argument to %<__builtin_prefetch%> must be a constant");
1028 arg2 = integer_zero_node;
1030 op2 = expand_normal (arg2);
1031 /* Argument 2 must be 0, 1, 2, or 3. */
1032 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1034 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1035 op2 = const0_rtx;
1038 #ifdef HAVE_prefetch
1039 if (HAVE_prefetch)
1041 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1042 (op0,
1043 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1044 || (GET_MODE (op0) != Pmode))
1046 op0 = convert_memory_address (Pmode, op0);
1047 op0 = force_reg (Pmode, op0);
1049 emit_insn (gen_prefetch (op0, op1, op2));
1051 #endif
1053 /* Don't do anything with direct references to volatile memory, but
1054 generate code to handle other side effects. */
1055 if (!MEM_P (op0) && side_effects_p (op0))
1056 emit_insn (op0);
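/* An illustrative sketch (hypothetical user code) of the call form handled
   above: the address argument is required; the read/write flag (0) and the
   locality hint (3) are the optional constant arguments validated above.

       double prefetch_sum (const double *a, int n)
       {
         double sum = 0;
         int i;
         for (i = 0; i < n; i++)
           {
             __builtin_prefetch (&a[i + 8], 0, 3);
             sum += a[i];
           }
         return sum;
       }  */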
1059 /* Get a MEM rtx for expression EXP which is the address of an operand
1060 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1061 the maximum length of the block of memory that might be accessed or
1062 NULL if unknown. */
1064 static rtx
1065 get_memory_rtx (tree exp, tree len)
1067 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1068 rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1070 /* Get an expression we can use to find the attributes to assign to MEM.
1071 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1072 we can. First remove any nops. */
1073 while (CONVERT_EXPR_P (exp)
1074 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1075 exp = TREE_OPERAND (exp, 0);
1077 if (TREE_CODE (exp) == ADDR_EXPR)
1078 exp = TREE_OPERAND (exp, 0);
1079 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1080 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1081 else
1082 exp = NULL;
1084 /* Honor attributes derived from exp, except for the alias set
1085 (as builtin stringops may alias with anything) and the size
1086 (as stringops may access multiple array elements). */
1087 if (exp)
1089 set_mem_attributes (mem, exp, 0);
1091 /* Allow the string and memory builtins to overflow from one
1092 field into another, see http://gcc.gnu.org/PR23561.
1093 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1094 memory accessed by the string or memory builtin will fit
1095 within the field. */
1096 if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1098 tree mem_expr = MEM_EXPR (mem);
1099 HOST_WIDE_INT offset = -1, length = -1;
1100 tree inner = exp;
1102 while (TREE_CODE (inner) == ARRAY_REF
1103 || CONVERT_EXPR_P (inner)
1104 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1105 || TREE_CODE (inner) == SAVE_EXPR)
1106 inner = TREE_OPERAND (inner, 0);
1108 gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1110 if (MEM_OFFSET (mem)
1111 && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1112 offset = INTVAL (MEM_OFFSET (mem));
1114 if (offset >= 0 && len && host_integerp (len, 0))
1115 length = tree_low_cst (len, 0);
1117 while (TREE_CODE (inner) == COMPONENT_REF)
1119 tree field = TREE_OPERAND (inner, 1);
1120 gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1121 gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1123 /* Bitfields are generally not byte-addressable. */
1124 gcc_assert (!DECL_BIT_FIELD (field)
1125 || ((tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1126 % BITS_PER_UNIT) == 0
1127 && host_integerp (DECL_SIZE (field), 0)
1128 && (TREE_INT_CST_LOW (DECL_SIZE (field))
1129 % BITS_PER_UNIT) == 0));
1131 /* If we can prove that the memory starting at XEXP (mem, 0) and
1132 ending at XEXP (mem, 0) + LENGTH will fit into this field, we
1133 can keep the COMPONENT_REF in MEM_EXPR. But be careful with
1134 fields without DECL_SIZE_UNIT like flexible array members. */
1135 if (length >= 0
1136 && DECL_SIZE_UNIT (field)
1137 && host_integerp (DECL_SIZE_UNIT (field), 0))
1139 HOST_WIDE_INT size
1140 = TREE_INT_CST_LOW (DECL_SIZE_UNIT (field));
1141 if (offset <= size
1142 && length <= size
1143 && offset + length <= size)
1144 break;
1147 if (offset >= 0
1148 && host_integerp (DECL_FIELD_OFFSET (field), 0))
1149 offset += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
1150 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1151 / BITS_PER_UNIT;
1152 else
1154 offset = -1;
1155 length = -1;
1158 mem_expr = TREE_OPERAND (mem_expr, 0);
1159 inner = TREE_OPERAND (inner, 0);
1162 if (mem_expr == NULL)
1163 offset = -1;
1164 if (mem_expr != MEM_EXPR (mem))
1166 set_mem_expr (mem, mem_expr);
1167 set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1170 set_mem_alias_set (mem, 0);
1171 set_mem_size (mem, NULL_RTX);
1174 return mem;
1177 /* Built-in functions to perform an untyped call and return. */
1179 /* For each register that may be used for calling a function, this
1180 gives a mode used to copy the register's value. VOIDmode indicates
1181 the register is not used for calling a function. If the machine
1182 has register windows, this gives only the outbound registers.
1183 INCOMING_REGNO gives the corresponding inbound register. */
1184 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1186 /* For each register that may be used for returning values, this gives
1187 a mode used to copy the register's value. VOIDmode indicates the
1188 register is not used for returning values. If the machine has
1189 register windows, this gives only the outbound registers.
1190 INCOMING_REGNO gives the corresponding inbound register. */
1191 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1193 /* For each register that may be used for calling a function, this
1194 gives the offset of that register into the block returned by
1195 __builtin_apply_args. 0 indicates that the register is not
1196 used for calling a function. */
1197 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1199 /* Return the size required for the block returned by __builtin_apply_args,
1200 and initialize apply_args_mode. */
1202 static int
1203 apply_args_size (void)
1205 static int size = -1;
1206 int align;
1207 unsigned int regno;
1208 enum machine_mode mode;
1210 /* The values computed by this function never change. */
1211 if (size < 0)
1213 /* The first value is the incoming arg-pointer. */
1214 size = GET_MODE_SIZE (Pmode);
1216 /* The second value is the structure value address unless this is
1217 passed as an "invisible" first argument. */
1218 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1219 size += GET_MODE_SIZE (Pmode);
1221 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1222 if (FUNCTION_ARG_REGNO_P (regno))
1224 mode = reg_raw_mode[regno];
1226 gcc_assert (mode != VOIDmode);
1228 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1229 if (size % align != 0)
1230 size = CEIL (size, align) * align;
1231 apply_args_reg_offset[regno] = size;
1232 size += GET_MODE_SIZE (mode);
1233 apply_args_mode[regno] = mode;
1235 else
1237 apply_args_mode[regno] = VOIDmode;
1238 apply_args_reg_offset[regno] = 0;
1241 return size;
1244 /* Return the size required for the block returned by __builtin_apply,
1245 and initialize apply_result_mode. */
1247 static int
1248 apply_result_size (void)
1250 static int size = -1;
1251 int align, regno;
1252 enum machine_mode mode;
1254 /* The values computed by this function never change. */
1255 if (size < 0)
1257 size = 0;
1259 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1260 if (FUNCTION_VALUE_REGNO_P (regno))
1262 mode = reg_raw_mode[regno];
1264 gcc_assert (mode != VOIDmode);
1266 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1267 if (size % align != 0)
1268 size = CEIL (size, align) * align;
1269 size += GET_MODE_SIZE (mode);
1270 apply_result_mode[regno] = mode;
1272 else
1273 apply_result_mode[regno] = VOIDmode;
1275 /* Allow targets that use untyped_call and untyped_return to override
1276 the size so that machine-specific information can be stored here. */
1277 #ifdef APPLY_RESULT_SIZE
1278 size = APPLY_RESULT_SIZE;
1279 #endif
1281 return size;
1284 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1285 /* Create a vector describing the result block RESULT. If SAVEP is true,
1286 the result block is used to save the values; otherwise it is used to
1287 restore the values. */
1289 static rtx
1290 result_vector (int savep, rtx result)
1292 int regno, size, align, nelts;
1293 enum machine_mode mode;
1294 rtx reg, mem;
1295 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1297 size = nelts = 0;
1298 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1299 if ((mode = apply_result_mode[regno]) != VOIDmode)
1301 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1302 if (size % align != 0)
1303 size = CEIL (size, align) * align;
1304 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1305 mem = adjust_address (result, mode, size);
1306 savevec[nelts++] = (savep
1307 ? gen_rtx_SET (VOIDmode, mem, reg)
1308 : gen_rtx_SET (VOIDmode, reg, mem));
1309 size += GET_MODE_SIZE (mode);
1311 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1313 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1315 /* Save the state required to perform an untyped call with the same
1316 arguments as were passed to the current function. */
1318 static rtx
1319 expand_builtin_apply_args_1 (void)
1321 rtx registers, tem;
1322 int size, align, regno;
1323 enum machine_mode mode;
1324 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1326 /* Create a block where the arg-pointer, structure value address,
1327 and argument registers can be saved. */
1328 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1330 /* Walk past the arg-pointer and structure value address. */
1331 size = GET_MODE_SIZE (Pmode);
1332 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1333 size += GET_MODE_SIZE (Pmode);
1335 /* Save each register used in calling a function to the block. */
1336 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1337 if ((mode = apply_args_mode[regno]) != VOIDmode)
1339 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1340 if (size % align != 0)
1341 size = CEIL (size, align) * align;
1343 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1345 emit_move_insn (adjust_address (registers, mode, size), tem);
1346 size += GET_MODE_SIZE (mode);
1349 /* Save the arg pointer to the block. */
1350 tem = copy_to_reg (virtual_incoming_args_rtx);
1351 #ifdef STACK_GROWS_DOWNWARD
1352 /* We need the pointer as the caller actually passed them to us, not
1353 as we might have pretended they were passed. Make sure it's a valid
1354 operand, as emit_move_insn isn't expected to handle a PLUS. */
1355 tem
1356 = force_operand (plus_constant (tem, crtl->args.pretend_args_size),
1357 NULL_RTX);
1358 #endif
1359 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1361 size = GET_MODE_SIZE (Pmode);
1363 /* Save the structure value address unless this is passed as an
1364 "invisible" first argument. */
1365 if (struct_incoming_value)
1367 emit_move_insn (adjust_address (registers, Pmode, size),
1368 copy_to_reg (struct_incoming_value));
1369 size += GET_MODE_SIZE (Pmode);
1372 /* Return the address of the block. */
1373 return copy_addr_to_reg (XEXP (registers, 0));
1376 /* __builtin_apply_args returns a block of memory allocated on
1377 the stack into which is stored the arg pointer, structure
1378 value address, static chain, and all the registers that might
1379 possibly be used in performing a function call. The code is
1380 moved to the start of the function so the incoming values are
1381 saved. */
1383 static rtx
1384 expand_builtin_apply_args (void)
1386 /* Don't do __builtin_apply_args more than once in a function.
1387 Save the result of the first call and reuse it. */
1388 if (apply_args_value != 0)
1389 return apply_args_value;
1391 /* When this function is called, it means that registers must be
1392 saved on entry to this function. So we migrate the
1393 call to the first insn of this function. */
1394 rtx temp;
1395 rtx seq;
1397 start_sequence ();
1398 temp = expand_builtin_apply_args_1 ();
1399 seq = get_insns ();
1400 end_sequence ();
1402 apply_args_value = temp;
1404 /* Put the insns after the NOTE that starts the function.
1405 If this is inside a start_sequence, make the outer-level insn
1406 chain current, so the code is placed at the start of the
1407 function. */
1408 push_topmost_sequence ();
1409 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1410 pop_topmost_sequence ();
1411 return temp;
1415 /* Perform an untyped call and save the state required to perform an
1416 untyped return of whatever value was returned by the given function. */
1418 static rtx
1419 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1421 int size, align, regno;
1422 enum machine_mode mode;
1423 rtx incoming_args, result, reg, dest, src, call_insn;
1424 rtx old_stack_level = 0;
1425 rtx call_fusage = 0;
1426 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1428 arguments = convert_memory_address (Pmode, arguments);
1430 /* Create a block where the return registers can be saved. */
1431 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1433 /* Fetch the arg pointer from the ARGUMENTS block. */
1434 incoming_args = gen_reg_rtx (Pmode);
1435 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1436 #ifndef STACK_GROWS_DOWNWARD
1437 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1438 incoming_args, 0, OPTAB_LIB_WIDEN);
1439 #endif
1441 /* Push a new argument block and copy the arguments. Do not allow
1442 the (potential) memcpy call below to interfere with our stack
1443 manipulations. */
1444 do_pending_stack_adjust ();
1445 NO_DEFER_POP;
1447 /* Save the stack with nonlocal if available. */
1448 #ifdef HAVE_save_stack_nonlocal
1449 if (HAVE_save_stack_nonlocal)
1450 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1451 else
1452 #endif
1453 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1455 /* Allocate a block of memory onto the stack and copy the memory
1456 arguments to the outgoing arguments address. */
1457 allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1458 dest = virtual_outgoing_args_rtx;
1459 #ifndef STACK_GROWS_DOWNWARD
1460 if (GET_CODE (argsize) == CONST_INT)
1461 dest = plus_constant (dest, -INTVAL (argsize));
1462 else
1463 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1464 #endif
1465 dest = gen_rtx_MEM (BLKmode, dest);
1466 set_mem_align (dest, PARM_BOUNDARY);
1467 src = gen_rtx_MEM (BLKmode, incoming_args);
1468 set_mem_align (src, PARM_BOUNDARY);
1469 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1471 /* Refer to the argument block. */
1472 apply_args_size ();
1473 arguments = gen_rtx_MEM (BLKmode, arguments);
1474 set_mem_align (arguments, PARM_BOUNDARY);
1476 /* Walk past the arg-pointer and structure value address. */
1477 size = GET_MODE_SIZE (Pmode);
1478 if (struct_value)
1479 size += GET_MODE_SIZE (Pmode);
1481 /* Restore each of the registers previously saved. Make USE insns
1482 for each of these registers for use in making the call. */
1483 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1484 if ((mode = apply_args_mode[regno]) != VOIDmode)
1486 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1487 if (size % align != 0)
1488 size = CEIL (size, align) * align;
1489 reg = gen_rtx_REG (mode, regno);
1490 emit_move_insn (reg, adjust_address (arguments, mode, size));
1491 use_reg (&call_fusage, reg);
1492 size += GET_MODE_SIZE (mode);
1495 /* Restore the structure value address unless this is passed as an
1496 "invisible" first argument. */
1497 size = GET_MODE_SIZE (Pmode);
1498 if (struct_value)
1500 rtx value = gen_reg_rtx (Pmode);
1501 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1502 emit_move_insn (struct_value, value);
1503 if (REG_P (struct_value))
1504 use_reg (&call_fusage, struct_value);
1505 size += GET_MODE_SIZE (Pmode);
1508 /* All arguments and registers used for the call are set up by now! */
1509 function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1511 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1512 and we don't want to load it into a register as an optimization,
1513 because prepare_call_address already did it if it should be done. */
1514 if (GET_CODE (function) != SYMBOL_REF)
1515 function = memory_address (FUNCTION_MODE, function);
1517 /* Generate the actual call instruction and save the return value. */
1518 #ifdef HAVE_untyped_call
1519 if (HAVE_untyped_call)
1520 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1521 result, result_vector (1, result)));
1522 else
1523 #endif
1524 #ifdef HAVE_call_value
1525 if (HAVE_call_value)
1527 rtx valreg = 0;
1529 /* Locate the unique return register. It is not possible to
1530 express a call that sets more than one return register using
1531 call_value; use untyped_call for that. In fact, untyped_call
1532 only needs to save the return registers in the given block. */
1533 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1534 if ((mode = apply_result_mode[regno]) != VOIDmode)
1536 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1538 valreg = gen_rtx_REG (mode, regno);
1541 emit_call_insn (GEN_CALL_VALUE (valreg,
1542 gen_rtx_MEM (FUNCTION_MODE, function),
1543 const0_rtx, NULL_RTX, const0_rtx));
1545 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1547 else
1548 #endif
1549 gcc_unreachable ();
1551 /* Find the CALL insn we just emitted, and attach the register usage
1552 information. */
1553 call_insn = last_call_insn ();
1554 add_function_usage_to (call_insn, call_fusage);
1556 /* Restore the stack. */
1557 #ifdef HAVE_save_stack_nonlocal
1558 if (HAVE_save_stack_nonlocal)
1559 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1560 else
1561 #endif
1562 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1564 OK_DEFER_POP;
1566 /* Return the address of the result block. */
1567 result = copy_addr_to_reg (XEXP (result, 0));
1568 return convert_memory_address (ptr_mode, result);
1571 /* Perform an untyped return. */
1573 static void
1574 expand_builtin_return (rtx result)
1576 int size, align, regno;
1577 enum machine_mode mode;
1578 rtx reg;
1579 rtx call_fusage = 0;
1581 result = convert_memory_address (Pmode, result);
1583 apply_result_size ();
1584 result = gen_rtx_MEM (BLKmode, result);
1586 #ifdef HAVE_untyped_return
1587 if (HAVE_untyped_return)
1589 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1590 emit_barrier ();
1591 return;
1593 #endif
1595 /* Restore the return value and note that each value is used. */
1596 size = 0;
1597 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1598 if ((mode = apply_result_mode[regno]) != VOIDmode)
1600 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1601 if (size % align != 0)
1602 size = CEIL (size, align) * align;
1603 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1604 emit_move_insn (reg, adjust_address (result, mode, size));
1606 push_to_sequence (call_fusage);
1607 emit_use (reg);
1608 call_fusage = get_insns ();
1609 end_sequence ();
1610 size += GET_MODE_SIZE (mode);
1613 /* Put the USE insns before the return. */
1614 emit_insn (call_fusage);
1616 /* Return whatever value was restored by jumping directly to the end
1617 of the function. */
1618 expand_naked_return ();
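/* An illustrative sketch (hypothetical user code) of how the three builtins
   expanded above combine to forward a call; the 64 is an assumed upper
   bound on the size, in bytes, of the argument block to copy.

       extern int target_fn (int, int);

       int forward (int a, int b)
       {
         void *args = __builtin_apply_args ();
         void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (res);
       }  */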
1621 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1623 static enum type_class
1624 type_to_class (tree type)
1626 switch (TREE_CODE (type))
1628 case VOID_TYPE: return void_type_class;
1629 case INTEGER_TYPE: return integer_type_class;
1630 case ENUMERAL_TYPE: return enumeral_type_class;
1631 case BOOLEAN_TYPE: return boolean_type_class;
1632 case POINTER_TYPE: return pointer_type_class;
1633 case REFERENCE_TYPE: return reference_type_class;
1634 case OFFSET_TYPE: return offset_type_class;
1635 case REAL_TYPE: return real_type_class;
1636 case COMPLEX_TYPE: return complex_type_class;
1637 case FUNCTION_TYPE: return function_type_class;
1638 case METHOD_TYPE: return method_type_class;
1639 case RECORD_TYPE: return record_type_class;
1640 case UNION_TYPE:
1641 case QUAL_UNION_TYPE: return union_type_class;
1642 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1643 ? string_type_class : array_type_class);
1644 case LANG_TYPE: return lang_type_class;
1645 default: return no_type_class;
1649 /* Expand a call EXP to __builtin_classify_type. */
1651 static rtx
1652 expand_builtin_classify_type (tree exp)
1654 if (call_expr_nargs (exp))
1655 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1656 return GEN_INT (no_type_class);
1659 /* This helper macro, meant to be used in mathfn_built_in below,
1660 determines which among a set of three builtin math functions is
1661 appropriate for a given type mode. The `F' and `L' cases are
1662 automatically generated from the `double' case. */
1663 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1664 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1665 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1666 fcodel = BUILT_IN_MATHFN##L ; break;
1667 /* Similar to above, but appends _R after any F/L suffix. */
1668 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1669 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1670 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1671 fcodel = BUILT_IN_MATHFN##L_R ; break;
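/* For illustration, CASE_MATHFN (BUILT_IN_SIN) expands, via token pasting,
   to roughly:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one CASE_MATHFN line covers the double, float and long double
   variants of a math builtin.  */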
1673 /* Return the mathematical function equivalent to FN but operating directly
1674 on TYPE, if available. If IMPLICIT is true find the function in
1675 implicit_built_in_decls[], otherwise use built_in_decls[]. If we
1676 can't do the conversion, return zero. */
1678 static tree
1679 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit)
1681 tree const *const fn_arr
1682 = implicit ? implicit_built_in_decls : built_in_decls;
1683 enum built_in_function fcode, fcodef, fcodel;
1685 switch (fn)
1687 CASE_MATHFN (BUILT_IN_ACOS)
1688 CASE_MATHFN (BUILT_IN_ACOSH)
1689 CASE_MATHFN (BUILT_IN_ASIN)
1690 CASE_MATHFN (BUILT_IN_ASINH)
1691 CASE_MATHFN (BUILT_IN_ATAN)
1692 CASE_MATHFN (BUILT_IN_ATAN2)
1693 CASE_MATHFN (BUILT_IN_ATANH)
1694 CASE_MATHFN (BUILT_IN_CBRT)
1695 CASE_MATHFN (BUILT_IN_CEIL)
1696 CASE_MATHFN (BUILT_IN_CEXPI)
1697 CASE_MATHFN (BUILT_IN_COPYSIGN)
1698 CASE_MATHFN (BUILT_IN_COS)
1699 CASE_MATHFN (BUILT_IN_COSH)
1700 CASE_MATHFN (BUILT_IN_DREM)
1701 CASE_MATHFN (BUILT_IN_ERF)
1702 CASE_MATHFN (BUILT_IN_ERFC)
1703 CASE_MATHFN (BUILT_IN_EXP)
1704 CASE_MATHFN (BUILT_IN_EXP10)
1705 CASE_MATHFN (BUILT_IN_EXP2)
1706 CASE_MATHFN (BUILT_IN_EXPM1)
1707 CASE_MATHFN (BUILT_IN_FABS)
1708 CASE_MATHFN (BUILT_IN_FDIM)
1709 CASE_MATHFN (BUILT_IN_FLOOR)
1710 CASE_MATHFN (BUILT_IN_FMA)
1711 CASE_MATHFN (BUILT_IN_FMAX)
1712 CASE_MATHFN (BUILT_IN_FMIN)
1713 CASE_MATHFN (BUILT_IN_FMOD)
1714 CASE_MATHFN (BUILT_IN_FREXP)
1715 CASE_MATHFN (BUILT_IN_GAMMA)
1716 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1717 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1718 CASE_MATHFN (BUILT_IN_HYPOT)
1719 CASE_MATHFN (BUILT_IN_ILOGB)
1720 CASE_MATHFN (BUILT_IN_INF)
1721 CASE_MATHFN (BUILT_IN_ISINF)
1722 CASE_MATHFN (BUILT_IN_J0)
1723 CASE_MATHFN (BUILT_IN_J1)
1724 CASE_MATHFN (BUILT_IN_JN)
1725 CASE_MATHFN (BUILT_IN_LCEIL)
1726 CASE_MATHFN (BUILT_IN_LDEXP)
1727 CASE_MATHFN (BUILT_IN_LFLOOR)
1728 CASE_MATHFN (BUILT_IN_LGAMMA)
1729 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1730 CASE_MATHFN (BUILT_IN_LLCEIL)
1731 CASE_MATHFN (BUILT_IN_LLFLOOR)
1732 CASE_MATHFN (BUILT_IN_LLRINT)
1733 CASE_MATHFN (BUILT_IN_LLROUND)
1734 CASE_MATHFN (BUILT_IN_LOG)
1735 CASE_MATHFN (BUILT_IN_LOG10)
1736 CASE_MATHFN (BUILT_IN_LOG1P)
1737 CASE_MATHFN (BUILT_IN_LOG2)
1738 CASE_MATHFN (BUILT_IN_LOGB)
1739 CASE_MATHFN (BUILT_IN_LRINT)
1740 CASE_MATHFN (BUILT_IN_LROUND)
1741 CASE_MATHFN (BUILT_IN_MODF)
1742 CASE_MATHFN (BUILT_IN_NAN)
1743 CASE_MATHFN (BUILT_IN_NANS)
1744 CASE_MATHFN (BUILT_IN_NEARBYINT)
1745 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1746 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1747 CASE_MATHFN (BUILT_IN_POW)
1748 CASE_MATHFN (BUILT_IN_POWI)
1749 CASE_MATHFN (BUILT_IN_POW10)
1750 CASE_MATHFN (BUILT_IN_REMAINDER)
1751 CASE_MATHFN (BUILT_IN_REMQUO)
1752 CASE_MATHFN (BUILT_IN_RINT)
1753 CASE_MATHFN (BUILT_IN_ROUND)
1754 CASE_MATHFN (BUILT_IN_SCALB)
1755 CASE_MATHFN (BUILT_IN_SCALBLN)
1756 CASE_MATHFN (BUILT_IN_SCALBN)
1757 CASE_MATHFN (BUILT_IN_SIGNBIT)
1758 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1759 CASE_MATHFN (BUILT_IN_SIN)
1760 CASE_MATHFN (BUILT_IN_SINCOS)
1761 CASE_MATHFN (BUILT_IN_SINH)
1762 CASE_MATHFN (BUILT_IN_SQRT)
1763 CASE_MATHFN (BUILT_IN_TAN)
1764 CASE_MATHFN (BUILT_IN_TANH)
1765 CASE_MATHFN (BUILT_IN_TGAMMA)
1766 CASE_MATHFN (BUILT_IN_TRUNC)
1767 CASE_MATHFN (BUILT_IN_Y0)
1768 CASE_MATHFN (BUILT_IN_Y1)
1769 CASE_MATHFN (BUILT_IN_YN)
1771 default:
1772 return NULL_TREE;
1775 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1776 return fn_arr[fcode];
1777 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1778 return fn_arr[fcodef];
1779 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1780 return fn_arr[fcodel];
1781 else
1782 return NULL_TREE;
1785 /* Like mathfn_built_in_1(), but always use the implicit array. */
1787 tree
1788 mathfn_built_in (tree type, enum built_in_function fn)
1790 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
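/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) looks up
   implicit_built_in_decls[BUILT_IN_SINF] and so returns the decl for sinf,
   or NULL_TREE if the runtime is not assumed to provide it.  */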
1793 /* If errno must be maintained, expand the RTL to check if the result,
1794 TARGET, of a built-in function call, EXP, is NaN, and if so set
1795 errno to EDOM. */
1797 static void
1798 expand_errno_check (tree exp, rtx target)
1800 rtx lab = gen_label_rtx ();
1802 /* Test the result; if it is NaN, set errno=EDOM because
1803 the argument was not in the domain. */
1804 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1805 0, lab);
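  /* Comparing TARGET with itself is true for everything except a NaN, so
     the branch to LAB skips the errno store whenever the result is an
     ordinary number.  */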
1807 #ifdef TARGET_EDOM
1808 /* If this built-in doesn't throw an exception, set errno directly. */
1809 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1811 #ifdef GEN_ERRNO_RTX
1812 rtx errno_rtx = GEN_ERRNO_RTX;
1813 #else
1814 rtx errno_rtx
1815 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1816 #endif
1817 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1818 emit_label (lab);
1819 return;
1821 #endif
1823 /* Make sure the library call isn't expanded as a tail call. */
1824 CALL_EXPR_TAILCALL (exp) = 0;
1826 /* We can't set errno=EDOM directly; let the library call do it.
1827 Pop the arguments right away in case the call gets deleted. */
1828 NO_DEFER_POP;
1829 expand_call (exp, target, 0);
1830 OK_DEFER_POP;
1831 emit_label (lab);
1834 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1835 Return NULL_RTX if a normal call should be emitted rather than expanding
1836 the function in-line. EXP is the expression that is a call to the builtin
1837 function; if convenient, the result should be placed in TARGET.
1838 SUBTARGET may be used as the target for computing one of EXP's operands. */
1840 static rtx
1841 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1843 optab builtin_optab;
1844 rtx op0, insns, before_call;
1845 tree fndecl = get_callee_fndecl (exp);
1846 enum machine_mode mode;
1847 bool errno_set = false;
1848 tree arg;
1850 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
1851 return NULL_RTX;
1853 arg = CALL_EXPR_ARG (exp, 0);
1855 switch (DECL_FUNCTION_CODE (fndecl))
1857 CASE_FLT_FN (BUILT_IN_SQRT):
1858 errno_set = ! tree_expr_nonnegative_p (arg);
1859 builtin_optab = sqrt_optab;
1860 break;
1861 CASE_FLT_FN (BUILT_IN_EXP):
1862 errno_set = true; builtin_optab = exp_optab; break;
1863 CASE_FLT_FN (BUILT_IN_EXP10):
1864 CASE_FLT_FN (BUILT_IN_POW10):
1865 errno_set = true; builtin_optab = exp10_optab; break;
1866 CASE_FLT_FN (BUILT_IN_EXP2):
1867 errno_set = true; builtin_optab = exp2_optab; break;
1868 CASE_FLT_FN (BUILT_IN_EXPM1):
1869 errno_set = true; builtin_optab = expm1_optab; break;
1870 CASE_FLT_FN (BUILT_IN_LOGB):
1871 errno_set = true; builtin_optab = logb_optab; break;
1872 CASE_FLT_FN (BUILT_IN_LOG):
1873 errno_set = true; builtin_optab = log_optab; break;
1874 CASE_FLT_FN (BUILT_IN_LOG10):
1875 errno_set = true; builtin_optab = log10_optab; break;
1876 CASE_FLT_FN (BUILT_IN_LOG2):
1877 errno_set = true; builtin_optab = log2_optab; break;
1878 CASE_FLT_FN (BUILT_IN_LOG1P):
1879 errno_set = true; builtin_optab = log1p_optab; break;
1880 CASE_FLT_FN (BUILT_IN_ASIN):
1881 builtin_optab = asin_optab; break;
1882 CASE_FLT_FN (BUILT_IN_ACOS):
1883 builtin_optab = acos_optab; break;
1884 CASE_FLT_FN (BUILT_IN_TAN):
1885 builtin_optab = tan_optab; break;
1886 CASE_FLT_FN (BUILT_IN_ATAN):
1887 builtin_optab = atan_optab; break;
1888 CASE_FLT_FN (BUILT_IN_FLOOR):
1889 builtin_optab = floor_optab; break;
1890 CASE_FLT_FN (BUILT_IN_CEIL):
1891 builtin_optab = ceil_optab; break;
1892 CASE_FLT_FN (BUILT_IN_TRUNC):
1893 builtin_optab = btrunc_optab; break;
1894 CASE_FLT_FN (BUILT_IN_ROUND):
1895 builtin_optab = round_optab; break;
1896 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1897 builtin_optab = nearbyint_optab;
1898 if (flag_trapping_math)
1899 break;
1900 /* Else fall through and expand as rint. */
1901 CASE_FLT_FN (BUILT_IN_RINT):
1902 builtin_optab = rint_optab; break;
1903 default:
1904 gcc_unreachable ();
1907 /* Make a suitable register to place result in. */
1908 mode = TYPE_MODE (TREE_TYPE (exp));
1910 if (! flag_errno_math || ! HONOR_NANS (mode))
1911 errno_set = false;
1913 /* Before working hard, check whether the instruction is available. */
1914 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
1916 target = gen_reg_rtx (mode);
1918 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1919 need to expand the argument again. This way, we will not perform
1920 side-effects more than once. */
1921 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
1923 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
1925 start_sequence ();
1927 /* Compute into TARGET.
1928 Set TARGET to wherever the result comes back. */
1929 target = expand_unop (mode, builtin_optab, op0, target, 0);
1931 if (target != 0)
1933 if (errno_set)
1934 expand_errno_check (exp, target);
1936 /* Output the entire sequence. */
1937 insns = get_insns ();
1938 end_sequence ();
1939 emit_insn (insns);
1940 return target;
1943 /* If we were unable to expand via the builtin, stop the sequence
1944 (without outputting the insns) and call the library function
1945 with the stabilized argument list. */
1946 end_sequence ();
1949 before_call = get_last_insn ();
1951 return expand_call (exp, target, target == const0_rtx);
1954 /* Expand a call to the builtin binary math functions (pow and atan2).
1955 Return NULL_RTX if a normal call should be emitted rather than expanding the
1956 function in-line. EXP is the expression that is a call to the builtin
1957 function; if convenient, the result should be placed in TARGET.
1958 SUBTARGET may be used as the target for computing one of EXP's
1959 operands. */
1961 static rtx
1962 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1964 optab builtin_optab;
1965 rtx op0, op1, insns;
1966 int op1_type = REAL_TYPE;
1967 tree fndecl = get_callee_fndecl (exp);
1968 tree arg0, arg1;
1969 enum machine_mode mode;
1970 bool errno_set = true;
1972 switch (DECL_FUNCTION_CODE (fndecl))
1974 CASE_FLT_FN (BUILT_IN_SCALBN):
1975 CASE_FLT_FN (BUILT_IN_SCALBLN):
1976 CASE_FLT_FN (BUILT_IN_LDEXP):
1977 op1_type = INTEGER_TYPE;
1978 default:
1979 break;
1982 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
1983 return NULL_RTX;
1985 arg0 = CALL_EXPR_ARG (exp, 0);
1986 arg1 = CALL_EXPR_ARG (exp, 1);
1988 switch (DECL_FUNCTION_CODE (fndecl))
1990 CASE_FLT_FN (BUILT_IN_POW):
1991 builtin_optab = pow_optab; break;
1992 CASE_FLT_FN (BUILT_IN_ATAN2):
1993 builtin_optab = atan2_optab; break;
1994 CASE_FLT_FN (BUILT_IN_SCALB):
1995 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
1996 return 0;
1997 builtin_optab = scalb_optab; break;
1998 CASE_FLT_FN (BUILT_IN_SCALBN):
1999 CASE_FLT_FN (BUILT_IN_SCALBLN):
2000 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2001 return 0;
2002 /* Fall through... */
2003 CASE_FLT_FN (BUILT_IN_LDEXP):
2004 builtin_optab = ldexp_optab; break;
2005 CASE_FLT_FN (BUILT_IN_FMOD):
2006 builtin_optab = fmod_optab; break;
2007 CASE_FLT_FN (BUILT_IN_REMAINDER):
2008 CASE_FLT_FN (BUILT_IN_DREM):
2009 builtin_optab = remainder_optab; break;
2010 default:
2011 gcc_unreachable ();
2014 /* Make a suitable register to place result in. */
2015 mode = TYPE_MODE (TREE_TYPE (exp));
2017 /* Before working hard, check whether the instruction is available. */
2018 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2019 return NULL_RTX;
2021 target = gen_reg_rtx (mode);
2023 if (! flag_errno_math || ! HONOR_NANS (mode))
2024 errno_set = false;
2026 /* Always stabilize the argument list. */
2027 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2028 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2030 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2031 op1 = expand_normal (arg1);
2033 start_sequence ();
2035 /* Compute into TARGET.
2036 Set TARGET to wherever the result comes back. */
2037 target = expand_binop (mode, builtin_optab, op0, op1,
2038 target, 0, OPTAB_DIRECT);
2040 /* If we were unable to expand via the builtin, stop the sequence
2041 (without outputting the insns) and call the library function
2042 with the stabilized argument list. */
2043 if (target == 0)
2045 end_sequence ();
2046 return expand_call (exp, target, target == const0_rtx);
2049 if (errno_set)
2050 expand_errno_check (exp, target);
2052 /* Output the entire sequence. */
2053 insns = get_insns ();
2054 end_sequence ();
2055 emit_insn (insns);
2057 return target;
2060 /* Expand a call to the builtin sin and cos math functions.
2061 Return NULL_RTX if a normal call should be emitted rather than expanding the
2062 function in-line. EXP is the expression that is a call to the builtin
2063 function; if convenient, the result should be placed in TARGET.
2064 SUBTARGET may be used as the target for computing one of EXP's
2065 operands. */
2067 static rtx
2068 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2070 optab builtin_optab;
2071 rtx op0, insns;
2072 tree fndecl = get_callee_fndecl (exp);
2073 enum machine_mode mode;
2074 tree arg;
2076 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2077 return NULL_RTX;
2079 arg = CALL_EXPR_ARG (exp, 0);
2081 switch (DECL_FUNCTION_CODE (fndecl))
2083 CASE_FLT_FN (BUILT_IN_SIN):
2084 CASE_FLT_FN (BUILT_IN_COS):
2085 builtin_optab = sincos_optab; break;
2086 default:
2087 gcc_unreachable ();
2090 /* Make a suitable register to place result in. */
2091 mode = TYPE_MODE (TREE_TYPE (exp));
2093 /* Check if the sincos insn is available; otherwise fall back
2094 to the sin or cos insn. */
2095 if (optab_handler (builtin_optab, mode)->insn_code == CODE_FOR_nothing)
2096 switch (DECL_FUNCTION_CODE (fndecl))
2098 CASE_FLT_FN (BUILT_IN_SIN):
2099 builtin_optab = sin_optab; break;
2100 CASE_FLT_FN (BUILT_IN_COS):
2101 builtin_optab = cos_optab; break;
2102 default:
2103 gcc_unreachable ();
2106 /* Before working hard, check whether the instruction is available. */
2107 if (optab_handler (builtin_optab, mode)->insn_code != CODE_FOR_nothing)
2109 target = gen_reg_rtx (mode);
2111 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2112 need to expand the argument again. This way, we will not perform
2113 side-effects more than once. */
2114 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2116 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118 start_sequence ();
2120 /* Compute into TARGET.
2121 Set TARGET to wherever the result comes back. */
2122 if (builtin_optab == sincos_optab)
2124 int result;
2126 switch (DECL_FUNCTION_CODE (fndecl))
2128 CASE_FLT_FN (BUILT_IN_SIN):
2129 result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2130 break;
2131 CASE_FLT_FN (BUILT_IN_COS):
2132 result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2133 break;
2134 default:
2135 gcc_unreachable ();
2137 gcc_assert (result);
2139 else
2141 target = expand_unop (mode, builtin_optab, op0, target, 0);
2144 if (target != 0)
2146 /* Output the entire sequence. */
2147 insns = get_insns ();
2148 end_sequence ();
2149 emit_insn (insns);
2150 return target;
2153 /* If we were unable to expand via the builtin, stop the sequence
2154 (without outputting the insns) and call the library function
2155 with the stabilized argument list. */
2156 end_sequence ();
2159 target = expand_call (exp, target, target == const0_rtx);
2161 return target;
2164 /* Expand a call to one of the builtin math functions that operate on
2165 a floating point argument and output an integer result (ilogb, isinf,
2166 isnan, etc).
2167 Return 0 if a normal call should be emitted rather than expanding the
2168 function in-line. EXP is the expression that is a call to the builtin
2169 function; if convenient, the result should be placed in TARGET.
2170 SUBTARGET may be used as the target for computing one of EXP's operands. */
2172 static rtx
2173 expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
2175 optab builtin_optab = 0;
2176 enum insn_code icode = CODE_FOR_nothing;
2177 rtx op0;
2178 tree fndecl = get_callee_fndecl (exp);
2179 enum machine_mode mode;
2180 bool errno_set = false;
2181 tree arg;
2183 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2184 return NULL_RTX;
2186 arg = CALL_EXPR_ARG (exp, 0);
2188 switch (DECL_FUNCTION_CODE (fndecl))
2190 CASE_FLT_FN (BUILT_IN_ILOGB):
2191 errno_set = true; builtin_optab = ilogb_optab; break;
2192 CASE_FLT_FN (BUILT_IN_ISINF):
2193 builtin_optab = isinf_optab; break;
2194 case BUILT_IN_ISNORMAL:
2195 case BUILT_IN_ISFINITE:
2196 CASE_FLT_FN (BUILT_IN_FINITE):
2197 /* These builtins have no optabs (yet). */
2198 break;
2199 default:
2200 gcc_unreachable ();
2203 /* There's no easy way to detect the case we need to set EDOM. */
2204 if (flag_errno_math && errno_set)
2205 return NULL_RTX;
2207 /* Optab mode depends on the mode of the input argument. */
2208 mode = TYPE_MODE (TREE_TYPE (arg));
2210 if (builtin_optab)
2211 icode = optab_handler (builtin_optab, mode)->insn_code;
2213 /* Before working hard, check whether the instruction is available. */
2214 if (icode != CODE_FOR_nothing)
2216 /* Make a suitable register to place result in. */
2217 if (!target
2218 || GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2219 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
2221 gcc_assert (insn_data[icode].operand[0].predicate
2222 (target, GET_MODE (target)));
2224 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2225 need to expand the argument again. This way, we will not perform
2226 side-effects more than once. */
2227 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2229 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2231 if (mode != GET_MODE (op0))
2232 op0 = convert_to_mode (mode, op0, 0);
2234 /* Compute into TARGET.
2235 Set TARGET to wherever the result comes back. */
2236 emit_unop_insn (icode, target, op0, UNKNOWN);
2237 return target;
2240 /* If there is no optab, try generic code. */
2241 switch (DECL_FUNCTION_CODE (fndecl))
2243 tree result;
2245 CASE_FLT_FN (BUILT_IN_ISINF):
2247 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2248 tree const isgr_fn = built_in_decls[BUILT_IN_ISGREATER];
2249 tree const type = TREE_TYPE (arg);
2250 REAL_VALUE_TYPE r;
2251 char buf[128];
2253 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2254 real_from_string (&r, buf);
2255 result = build_call_expr (isgr_fn, 2,
2256 fold_build1 (ABS_EXPR, type, arg),
2257 build_real (type, r));
2258 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2260 CASE_FLT_FN (BUILT_IN_FINITE):
2261 case BUILT_IN_ISFINITE:
2263 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2264 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2265 tree const type = TREE_TYPE (arg);
2266 REAL_VALUE_TYPE r;
2267 char buf[128];
2269 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2270 real_from_string (&r, buf);
2271 result = build_call_expr (isle_fn, 2,
2272 fold_build1 (ABS_EXPR, type, arg),
2273 build_real (type, r));
2274 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2276 case BUILT_IN_ISNORMAL:
2278 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2279 islessequal(fabs(x),DBL_MAX). */
2280 tree const isle_fn = built_in_decls[BUILT_IN_ISLESSEQUAL];
2281 tree const isge_fn = built_in_decls[BUILT_IN_ISGREATEREQUAL];
2282 tree const type = TREE_TYPE (arg);
2283 REAL_VALUE_TYPE rmax, rmin;
2284 char buf[128];
2286 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
2287 real_from_string (&rmax, buf);
2288 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
2289 real_from_string (&rmin, buf);
2290 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
2291 result = build_call_expr (isle_fn, 2, arg,
2292 build_real (type, rmax));
2293 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
2294 build_call_expr (isge_fn, 2, arg,
2295 build_real (type, rmin)));
2296 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
2298 default:
2299 break;
2302 target = expand_call (exp, target, target == const0_rtx);
2304 return target;
2307 /* Expand a call to the builtin sincos math function.
2308 Return NULL_RTX if a normal call should be emitted rather than expanding the
2309 function in-line. EXP is the expression that is a call to the builtin
2310 function. */
2312 static rtx
2313 expand_builtin_sincos (tree exp)
2315 rtx op0, op1, op2, target1, target2;
2316 enum machine_mode mode;
2317 tree arg, sinp, cosp;
2318 int result;
2320 if (!validate_arglist (exp, REAL_TYPE,
2321 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2322 return NULL_RTX;
2324 arg = CALL_EXPR_ARG (exp, 0);
2325 sinp = CALL_EXPR_ARG (exp, 1);
2326 cosp = CALL_EXPR_ARG (exp, 2);
2328 /* Make a suitable register to place result in. */
2329 mode = TYPE_MODE (TREE_TYPE (arg));
2331 /* Check if sincos insn is available, otherwise emit the call. */
2332 if (optab_handler (sincos_optab, mode)->insn_code == CODE_FOR_nothing)
2333 return NULL_RTX;
2335 target1 = gen_reg_rtx (mode);
2336 target2 = gen_reg_rtx (mode);
2338 op0 = expand_normal (arg);
2339 op1 = expand_normal (build_fold_indirect_ref (sinp));
2340 op2 = expand_normal (build_fold_indirect_ref (cosp));
2342 /* Compute into target1 and target2, wherever the
2343 results come back. */
2344 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2345 gcc_assert (result);
2347 /* Move target1 and target2 to the memory locations indicated
2348 by op1 and op2. */
2349 emit_move_insn (op1, target1);
2350 emit_move_insn (op2, target2);
2352 return const0_rtx;
2355 /* Expand a call to the internal cexpi builtin to the sincos math function.
2356 EXP is the expression that is a call to the builtin function; if convenient,
2357 the result should be placed in TARGET. SUBTARGET may be used as the target
2358 for computing one of EXP's operands. */
2360 static rtx
2361 expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
2363 tree fndecl = get_callee_fndecl (exp);
2364 tree arg, type;
2365 enum machine_mode mode;
2366 rtx op0, op1, op2;
2368 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2369 return NULL_RTX;
2371 arg = CALL_EXPR_ARG (exp, 0);
2372 type = TREE_TYPE (arg);
2373 mode = TYPE_MODE (TREE_TYPE (arg));
2375 /* Try expanding via a sincos optab, and fall back to emitting a libcall
2376 to sincos or cexp. We can count on one of them being available because
2377 cexpi is only generated from sincos or cexp, or when either of them is available. */
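/* For example, __builtin_cexpif (x) may expand via the sincos optab, via a
   call to sincosf (x, &s, &c) with the result assembled as c + s*I, or, as a
   last resort, via a call to cexpf (x*I).  */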
2378 if (optab_handler (sincos_optab, mode)->insn_code != CODE_FOR_nothing)
2380 op1 = gen_reg_rtx (mode);
2381 op2 = gen_reg_rtx (mode);
2383 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2385 /* Compute into op1 and op2. */
2386 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2388 else if (TARGET_HAS_SINCOS)
2390 tree call, fn = NULL_TREE;
2391 tree top1, top2;
2392 rtx op1a, op2a;
2394 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2395 fn = built_in_decls[BUILT_IN_SINCOSF];
2396 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2397 fn = built_in_decls[BUILT_IN_SINCOS];
2398 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2399 fn = built_in_decls[BUILT_IN_SINCOSL];
2400 else
2401 gcc_unreachable ();
2403 op1 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2404 op2 = assign_temp (TREE_TYPE (arg), 0, 1, 1);
2405 op1a = copy_to_mode_reg (Pmode, XEXP (op1, 0));
2406 op2a = copy_to_mode_reg (Pmode, XEXP (op2, 0));
2407 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2408 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2410 /* Make sure not to fold the sincos call again. */
2411 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2412 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2413 call, 3, arg, top1, top2));
2415 else
2417 tree call, fn = NULL_TREE, narg;
2418 tree ctype = build_complex_type (type);
2420 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2421 fn = built_in_decls[BUILT_IN_CEXPF];
2422 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2423 fn = built_in_decls[BUILT_IN_CEXP];
2424 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2425 fn = built_in_decls[BUILT_IN_CEXPL];
2426 else
2427 gcc_unreachable ();
2429 /* If we don't have a decl for cexp, create one. This is the
2430 friendliest fallback if the user calls __builtin_cexpi on a
2431 target without full C99 function support. */
2432 if (fn == NULL_TREE)
2434 tree fntype;
2435 const char *name = NULL;
2437 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2438 name = "cexpf";
2439 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2440 name = "cexp";
2441 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2442 name = "cexpl";
2444 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2445 fn = build_fn_decl (name, fntype);
2448 narg = fold_build2 (COMPLEX_EXPR, ctype,
2449 build_real (type, dconst0), arg);
2451 /* Make sure not to fold the cexp call again. */
2452 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2453 return expand_expr (build_call_nary (ctype, call, 1, narg),
2454 target, VOIDmode, EXPAND_NORMAL);
2457 /* Now build the proper return type. */
2458 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2459 make_tree (TREE_TYPE (arg), op2),
2460 make_tree (TREE_TYPE (arg), op1)),
2461 target, VOIDmode, EXPAND_NORMAL);
2464 /* Expand a call to one of the builtin rounding functions gcc defines
2465 as an extension (lfloor and lceil). As these are gcc extensions we
2466 do not need to worry about setting errno to EDOM.
2467 If expanding via optab fails, lower the expression to (int)(floor(x)).
2468 EXP is the expression that is a call to the builtin function;
2469 if convenient, the result should be placed in TARGET. SUBTARGET may
2470 be used as the target for computing one of EXP's operands. */
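/* For example, if the lfloor optab is unavailable, __builtin_lfloor (d)
   falls back to expanding floor (d) (or a manually built floor decl on
   targets without C99 support) and truncating the result with expand_fix.  */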
2472 static rtx
2473 expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2475 convert_optab builtin_optab;
2476 rtx op0, insns, tmp;
2477 tree fndecl = get_callee_fndecl (exp);
2478 enum built_in_function fallback_fn;
2479 tree fallback_fndecl;
2480 enum machine_mode mode;
2481 tree arg;
2483 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2484 gcc_unreachable ();
2486 arg = CALL_EXPR_ARG (exp, 0);
2488 switch (DECL_FUNCTION_CODE (fndecl))
2490 CASE_FLT_FN (BUILT_IN_LCEIL):
2491 CASE_FLT_FN (BUILT_IN_LLCEIL):
2492 builtin_optab = lceil_optab;
2493 fallback_fn = BUILT_IN_CEIL;
2494 break;
2496 CASE_FLT_FN (BUILT_IN_LFLOOR):
2497 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2498 builtin_optab = lfloor_optab;
2499 fallback_fn = BUILT_IN_FLOOR;
2500 break;
2502 default:
2503 gcc_unreachable ();
2506 /* Make a suitable register to place result in. */
2507 mode = TYPE_MODE (TREE_TYPE (exp));
2509 target = gen_reg_rtx (mode);
2511 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2512 need to expand the argument again. This way, we will not perform
2513 side-effects more than once. */
2514 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2516 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2518 start_sequence ();
2520 /* Compute into TARGET. */
2521 if (expand_sfix_optab (target, op0, builtin_optab))
2523 /* Output the entire sequence. */
2524 insns = get_insns ();
2525 end_sequence ();
2526 emit_insn (insns);
2527 return target;
2530 /* If we were unable to expand via the builtin, stop the sequence
2531 (without outputting the insns). */
2532 end_sequence ();
2534 /* Fall back to floating point rounding optab. */
2535 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2537 /* For non-C99 targets we may end up without a fallback fndecl here
2538 if the user called __builtin_lfloor directly. In this case emit
2539 a call to the floor/ceil variants nevertheless. This should result
2540 in the best user experience for targets without full C99 support. */
2541 if (fallback_fndecl == NULL_TREE)
2543 tree fntype;
2544 const char *name = NULL;
2546 switch (DECL_FUNCTION_CODE (fndecl))
2548 case BUILT_IN_LCEIL:
2549 case BUILT_IN_LLCEIL:
2550 name = "ceil";
2551 break;
2552 case BUILT_IN_LCEILF:
2553 case BUILT_IN_LLCEILF:
2554 name = "ceilf";
2555 break;
2556 case BUILT_IN_LCEILL:
2557 case BUILT_IN_LLCEILL:
2558 name = "ceill";
2559 break;
2560 case BUILT_IN_LFLOOR:
2561 case BUILT_IN_LLFLOOR:
2562 name = "floor";
2563 break;
2564 case BUILT_IN_LFLOORF:
2565 case BUILT_IN_LLFLOORF:
2566 name = "floorf";
2567 break;
2568 case BUILT_IN_LFLOORL:
2569 case BUILT_IN_LLFLOORL:
2570 name = "floorl";
2571 break;
2572 default:
2573 gcc_unreachable ();
2576 fntype = build_function_type_list (TREE_TYPE (arg),
2577 TREE_TYPE (arg), NULL_TREE);
2578 fallback_fndecl = build_fn_decl (name, fntype);
2581 exp = build_call_expr (fallback_fndecl, 1, arg);
2583 tmp = expand_normal (exp);
2585 /* Truncate the result of the floating point optab to an integer
2586 via expand_fix (). */
2587 target = gen_reg_rtx (mode);
2588 expand_fix (target, tmp, 0);
2590 return target;
2593 /* Expand a call to one of the builtin math functions doing integer
2594 conversion (lrint).
2595 Return 0 if a normal call should be emitted rather than expanding the
2596 function in-line. EXP is the expression that is a call to the builtin
2597 function; if convenient, the result should be placed in TARGET.
2598 SUBTARGET may be used as the target for computing one of EXP's operands. */
2600 static rtx
2601 expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
2603 convert_optab builtin_optab;
2604 rtx op0, insns;
2605 tree fndecl = get_callee_fndecl (exp);
2606 tree arg;
2607 enum machine_mode mode;
2609 /* There's no easy way to detect the case we need to set EDOM. */
2610 if (flag_errno_math)
2611 return NULL_RTX;
2613 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2614 gcc_unreachable ();
2616 arg = CALL_EXPR_ARG (exp, 0);
2618 switch (DECL_FUNCTION_CODE (fndecl))
2620 CASE_FLT_FN (BUILT_IN_LRINT):
2621 CASE_FLT_FN (BUILT_IN_LLRINT):
2622 builtin_optab = lrint_optab; break;
2623 CASE_FLT_FN (BUILT_IN_LROUND):
2624 CASE_FLT_FN (BUILT_IN_LLROUND):
2625 builtin_optab = lround_optab; break;
2626 default:
2627 gcc_unreachable ();
2630 /* Make a suitable register to place result in. */
2631 mode = TYPE_MODE (TREE_TYPE (exp));
2633 target = gen_reg_rtx (mode);
2635 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2636 need to expand the argument again. This way, we will not perform
2637 side-effects more than once. */
2638 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2640 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2642 start_sequence ();
2644 if (expand_sfix_optab (target, op0, builtin_optab))
2646 /* Output the entire sequence. */
2647 insns = get_insns ();
2648 end_sequence ();
2649 emit_insn (insns);
2650 return target;
2653 /* If we were unable to expand via the builtin, stop the sequence
2654 (without outputting the insns) and call the library function
2655 with the stabilized argument list. */
2656 end_sequence ();
2658 target = expand_call (exp, target, target == const0_rtx);
2660 return target;
2663 /* To evaluate powi(x,n), the floating point value x raised to the
2664 constant integer exponent n, we use a hybrid algorithm that
2665 combines the "window method" with look-up tables. For an
2666 introduction to exponentiation algorithms and "addition chains",
2667 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2668 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2669 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2670 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2672 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2673 multiplications to inline before calling the system library's pow
2674 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2675 so this default never requires calling pow, powf or powl. */
2677 #ifndef POWI_MAX_MULTS
2678 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2679 #endif
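/* The 2*bits(n)-2 bound follows from plain binary exponentiation: each bit
   of N after the most significant one costs at most one squaring plus one
   multiplication by x.  */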
2681 /* The size of the "optimal power tree" lookup table. All
2682 exponents less than this value are simply looked up in the
2683 powi_table below. This threshold is also used to size the
2684 cache of pseudo registers that hold intermediate results. */
2685 #define POWI_TABLE_SIZE 256
2687 /* The size, in bits of the window, used in the "window method"
2688 exponentiation algorithm. This is equivalent to a radix of
2689 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2690 #define POWI_WINDOW_SIZE 3
2692 /* The following table is an efficient representation of an
2693 "optimal power tree". For each value, i, the corresponding
2694 value, j, in the table states that an optimal evaluation
2695 sequence for calculating pow(x,i) can be found by evaluating
2696 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2697 100 integers is given in Knuth's "Seminumerical algorithms". */
2699 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2701 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2702 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2703 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2704 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2705 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2706 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2707 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2708 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2709 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2710 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2711 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2712 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2713 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2714 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2715 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2716 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2717 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2718 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2719 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2720 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2721 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2722 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2723 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2724 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2725 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2726 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2727 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2728 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2729 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2730 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2731 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2732 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
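/* For example, powi_table[7] is 4, so x**7 is evaluated as x**4 * x**3;
   recursing, x**4 = x**2 * x**2 and x**3 = x**2 * x, so with the cached
   x**2 the whole evaluation needs only four multiplications.  */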
2736 /* Return the number of multiplications required to calculate
2737 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2738 subroutine of powi_cost. CACHE is an array indicating
2739 which exponents have already been calculated. */
2741 static int
2742 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2744 /* If we've already calculated this exponent, then this evaluation
2745 doesn't require any additional multiplications. */
2746 if (cache[n])
2747 return 0;
2749 cache[n] = true;
2750 return powi_lookup_cost (n - powi_table[n], cache)
2751 + powi_lookup_cost (powi_table[n], cache) + 1;
2754 /* Return the number of multiplications required to calculate
2755 powi(x,n) for an arbitrary x, given the exponent N. This
2756 function needs to be kept in sync with expand_powi below. */
2758 static int
2759 powi_cost (HOST_WIDE_INT n)
2761 bool cache[POWI_TABLE_SIZE];
2762 unsigned HOST_WIDE_INT digit;
2763 unsigned HOST_WIDE_INT val;
2764 int result;
2766 if (n == 0)
2767 return 0;
2769 /* Ignore the reciprocal when calculating the cost. */
2770 val = (n < 0) ? -n : n;
2772 /* Initialize the exponent cache. */
2773 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2774 cache[1] = true;
2776 result = 0;
2778 while (val >= POWI_TABLE_SIZE)
2780 if (val & 1)
2782 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2783 result += powi_lookup_cost (digit, cache)
2784 + POWI_WINDOW_SIZE + 1;
2785 val >>= POWI_WINDOW_SIZE;
2787 else
2789 val >>= 1;
2790 result++;
2794 return result + powi_lookup_cost (val, cache);
2797 /* Recursive subroutine of expand_powi. This function takes the array,
2798 CACHE, of already calculated exponents and an exponent N and returns
2799 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2801 static rtx
2802 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2804 unsigned HOST_WIDE_INT digit;
2805 rtx target, result;
2806 rtx op0, op1;
2808 if (n < POWI_TABLE_SIZE)
2810 if (cache[n])
2811 return cache[n];
2813 target = gen_reg_rtx (mode);
2814 cache[n] = target;
2816 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2817 op1 = expand_powi_1 (mode, powi_table[n], cache);
2819 else if (n & 1)
2821 target = gen_reg_rtx (mode);
2822 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2823 op0 = expand_powi_1 (mode, n - digit, cache);
2824 op1 = expand_powi_1 (mode, digit, cache);
2826 else
2828 target = gen_reg_rtx (mode);
2829 op0 = expand_powi_1 (mode, n >> 1, cache);
2830 op1 = op0;
2833 result = expand_mult (mode, op0, op1, target, 0);
2834 if (result != target)
2835 emit_move_insn (target, result);
2836 return target;
2839 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2840 floating point operand in mode MODE, and N is the exponent. This
2841 function needs to be kept in sync with powi_cost above. */
2843 static rtx
2844 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2846 unsigned HOST_WIDE_INT val;
2847 rtx cache[POWI_TABLE_SIZE];
2848 rtx result;
2850 if (n == 0)
2851 return CONST1_RTX (mode);
2853 val = (n < 0) ? -n : n;
2855 memset (cache, 0, sizeof (cache));
2856 cache[1] = x;
2858 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2860 /* If the original exponent was negative, reciprocate the result. */
2861 if (n < 0)
2862 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2863 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2865 return result;
2868 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2869 a normal call should be emitted rather than expanding the function
2870 in-line. EXP is the expression that is a call to the builtin
2871 function; if convenient, the result should be placed in TARGET. */
2873 static rtx
2874 expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2876 tree arg0, arg1;
2877 tree fn, narg0;
2878 tree type = TREE_TYPE (exp);
2879 REAL_VALUE_TYPE cint, c, c2;
2880 HOST_WIDE_INT n;
2881 rtx op, op2;
2882 enum machine_mode mode = TYPE_MODE (type);
2884 if (! validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2885 return NULL_RTX;
2887 arg0 = CALL_EXPR_ARG (exp, 0);
2888 arg1 = CALL_EXPR_ARG (exp, 1);
2890 if (TREE_CODE (arg1) != REAL_CST
2891 || TREE_OVERFLOW (arg1))
2892 return expand_builtin_mathfn_2 (exp, target, subtarget);
2894 /* Handle constant exponents. */
2896 /* For integer valued exponents we can expand to an optimal multiplication
2897 sequence using expand_powi. */
2898 c = TREE_REAL_CST (arg1);
2899 n = real_to_integer (&c);
2900 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2901 if (real_identical (&c, &cint)
2902 && ((n >= -1 && n <= 2)
2903 || (flag_unsafe_math_optimizations
2904 && !optimize_size
2905 && powi_cost (n) <= POWI_MAX_MULTS)))
2907 op = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2908 if (n != 1)
2910 op = force_reg (mode, op);
2911 op = expand_powi (op, mode, n);
2913 return op;
2916 narg0 = builtin_save_expr (arg0);
2918 /* If the exponent is not integer valued, check if it is half of an integer.
2919 In this case we can expand to sqrt (x) * x**(n/2). */
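  /* For example, pow (x, 2.5) (n == 5 below) becomes sqrt (x) * x * x, and a
     negative half-integer exponent such as -2.5 is handled by reciprocating
     that result.  */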
2920 fn = mathfn_built_in (type, BUILT_IN_SQRT);
2921 if (fn != NULL_TREE)
2923 real_arithmetic (&c2, MULT_EXPR, &c, &dconst2);
2924 n = real_to_integer (&c2);
2925 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2926 if (real_identical (&c2, &cint)
2927 && ((flag_unsafe_math_optimizations
2928 && !optimize_size
2929 && powi_cost (n/2) <= POWI_MAX_MULTS)
2930 || n == 1))
2932 tree call_expr = build_call_expr (fn, 1, narg0);
2933 /* Use expand_expr in case the newly built call expression
2934 was folded to a non-call. */
2935 op = expand_expr (call_expr, subtarget, mode, EXPAND_NORMAL);
2936 if (n != 1)
2938 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2939 op2 = force_reg (mode, op2);
2940 op2 = expand_powi (op2, mode, abs (n / 2));
2941 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2942 0, OPTAB_LIB_WIDEN);
2943 /* If the original exponent was negative, reciprocate the
2944 result. */
2945 if (n < 0)
2946 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2947 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2949 return op;
2953 /* Check whether the exponent is a third of an integer. In this case
2954 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2955 different from pow (x, 1./3.) due to rounding and behavior
2956 with negative x we need to constrain this transformation to
2957 unsafe math and positive x or finite math. */
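  /* For example, with unsafe math optimizations pow (x, 4./3.) becomes
     x * cbrt (x), and pow (x, 5./3.) becomes cbrt (x) * cbrt (x) * x.  */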
2958 fn = mathfn_built_in (type, BUILT_IN_CBRT);
2959 if (fn != NULL_TREE
2960 && flag_unsafe_math_optimizations
2961 && (tree_expr_nonnegative_p (arg0)
2962 || !HONOR_NANS (mode)))
2964 REAL_VALUE_TYPE dconst3;
2965 real_from_integer (&dconst3, VOIDmode, 3, 0, 0);
2966 real_arithmetic (&c2, MULT_EXPR, &c, &dconst3);
2967 real_round (&c2, mode, &c2);
2968 n = real_to_integer (&c2);
2969 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2970 real_arithmetic (&c2, RDIV_EXPR, &cint, &dconst3);
2971 real_convert (&c2, mode, &c2);
2972 if (real_identical (&c2, &c)
2973 && ((!optimize_size
2974 && powi_cost (n/3) <= POWI_MAX_MULTS)
2975 || n == 1))
2977 tree call_expr = build_call_expr (fn, 1, narg0);
2978 op = expand_builtin (call_expr, NULL_RTX, subtarget, mode, 0);
2979 if (abs (n) % 3 == 2)
2980 op = expand_simple_binop (mode, MULT, op, op, op,
2981 0, OPTAB_LIB_WIDEN);
2982 if (n != 1)
2984 op2 = expand_expr (narg0, subtarget, VOIDmode, EXPAND_NORMAL);
2985 op2 = force_reg (mode, op2);
2986 op2 = expand_powi (op2, mode, abs (n / 3));
2987 op = expand_simple_binop (mode, MULT, op, op2, NULL_RTX,
2988 0, OPTAB_LIB_WIDEN);
2989 /* If the original exponent was negative, reciprocate the
2990 result. */
2991 if (n < 0)
2992 op = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2993 op, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2995 return op;
2999 /* Fall back to optab expansion. */
3000 return expand_builtin_mathfn_2 (exp, target, subtarget);
3003 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3004 a normal call should be emitted rather than expanding the function
3005 in-line. EXP is the expression that is a call to the builtin
3006 function; if convenient, the result should be placed in TARGET. */
3008 static rtx
3009 expand_builtin_powi (tree exp, rtx target, rtx subtarget)
3011 tree arg0, arg1;
3012 rtx op0, op1;
3013 enum machine_mode mode;
3014 enum machine_mode mode2;
3016 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3017 return NULL_RTX;
3019 arg0 = CALL_EXPR_ARG (exp, 0);
3020 arg1 = CALL_EXPR_ARG (exp, 1);
3021 mode = TYPE_MODE (TREE_TYPE (exp));
3023 /* Handle constant power. */
3025 if (TREE_CODE (arg1) == INTEGER_CST
3026 && !TREE_OVERFLOW (arg1))
3028 HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
3030 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3031 Otherwise, check the number of multiplications required. */
3032 if ((TREE_INT_CST_HIGH (arg1) == 0
3033 || TREE_INT_CST_HIGH (arg1) == -1)
3034 && ((n >= -1 && n <= 2)
3035 || (! optimize_size
3036 && powi_cost (n) <= POWI_MAX_MULTS)))
3038 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3039 op0 = force_reg (mode, op0);
3040 return expand_powi (op0, mode, n);
3044 /* Emit a libcall to libgcc. */
3046 /* Mode of the 2nd argument must match that of an int. */
3047 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
3049 if (target == NULL_RTX)
3050 target = gen_reg_rtx (mode);
3052 op0 = expand_expr (arg0, subtarget, mode, EXPAND_NORMAL);
3053 if (GET_MODE (op0) != mode)
3054 op0 = convert_to_mode (mode, op0, 0);
3055 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3056 if (GET_MODE (op1) != mode2)
3057 op1 = convert_to_mode (mode2, op1, 0);
3059 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3060 target, LCT_CONST, mode, 2,
3061 op0, mode, op1, mode2);
3063 return target;
3066 /* Expand expression EXP, which is a call to the strlen builtin. Return
3067 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3068 try to get the result in TARGET, if convenient. */
3070 static rtx
3071 expand_builtin_strlen (tree exp, rtx target,
3072 enum machine_mode target_mode)
3074 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3075 return NULL_RTX;
3076 else
3078 rtx pat;
3079 tree len;
3080 tree src = CALL_EXPR_ARG (exp, 0);
3081 rtx result, src_reg, char_rtx, before_strlen;
3082 enum machine_mode insn_mode = target_mode, char_mode;
3083 enum insn_code icode = CODE_FOR_nothing;
3084 int align;
3086 /* If the length can be computed at compile-time, return it. */
3087 len = c_strlen (src, 0);
3088 if (len)
3089 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3091 /* If the length can be computed at compile-time and is a constant
3092 integer, but there are side-effects in src, evaluate
3093 src for side-effects, then return len.
3094 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3095 can be optimized into: i++; x = 3; */
3096 len = c_strlen (src, 1);
3097 if (len && TREE_CODE (len) == INTEGER_CST)
3099 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3100 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3103 align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3105 /* If SRC is not a pointer type, don't do this operation inline. */
3106 if (align == 0)
3107 return NULL_RTX;
3109 /* Bail out if we can't compute strlen in the right mode. */
3110 while (insn_mode != VOIDmode)
3112 icode = optab_handler (strlen_optab, insn_mode)->insn_code;
3113 if (icode != CODE_FOR_nothing)
3114 break;
3116 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3118 if (insn_mode == VOIDmode)
3119 return NULL_RTX;
3121 /* Make a place to write the result of the instruction. */
3122 result = target;
3123 if (! (result != 0
3124 && REG_P (result)
3125 && GET_MODE (result) == insn_mode
3126 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3127 result = gen_reg_rtx (insn_mode);
3129 /* Make a place to hold the source address. We will not expand
3130 the actual source until we are sure that the expansion will
3131 not fail -- there are trees that cannot be expanded twice. */
3132 src_reg = gen_reg_rtx (Pmode);
3134 /* Mark the beginning of the strlen sequence so we can emit the
3135 source operand later. */
3136 before_strlen = get_last_insn ();
3138 char_rtx = const0_rtx;
3139 char_mode = insn_data[(int) icode].operand[2].mode;
3140 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
3141 char_mode))
3142 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
3144 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
3145 char_rtx, GEN_INT (align));
3146 if (! pat)
3147 return NULL_RTX;
3148 emit_insn (pat);
3150 /* Now that we are assured of success, expand the source. */
3151 start_sequence ();
3152 pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
3153 if (pat != src_reg)
3154 emit_move_insn (src_reg, pat);
3155 pat = get_insns ();
3156 end_sequence ();
3158 if (before_strlen)
3159 emit_insn_after (pat, before_strlen);
3160 else
3161 emit_insn_before (pat, get_insns ());
3163 /* Return the value in the proper mode for this function. */
3164 if (GET_MODE (result) == target_mode)
3165 target = result;
3166 else if (target != 0)
3167 convert_move (target, result, 0);
3168 else
3169 target = convert_to_mode (target_mode, result, 0);
3171 return target;
3175 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed;
3176 the caller should emit a normal call. Otherwise try to get the result
3177 in TARGET, if convenient (and in mode MODE if that's convenient). */
3179 static rtx
3180 expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
3182 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3184 tree type = TREE_TYPE (exp);
3185 tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
3186 CALL_EXPR_ARG (exp, 1), type);
3187 if (result)
3188 return expand_expr (result, target, mode, EXPAND_NORMAL);
3190 return NULL_RTX;
3193 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed;
3194 the caller should emit a normal call. Otherwise try to get the result
3195 in TARGET, if convenient (and in mode MODE if that's convenient). */
3197 static rtx
3198 expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
3200 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3202 tree type = TREE_TYPE (exp);
3203 tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
3204 CALL_EXPR_ARG (exp, 1), type);
3205 if (result)
3206 return expand_expr (result, target, mode, EXPAND_NORMAL);
3208 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3210 return NULL_RTX;
3213 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed;
3214 the caller should emit a normal call. Otherwise try to get the result
3215 in TARGET, if convenient (and in mode MODE if that's convenient). */
3217 static rtx
3218 expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
3220 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3222 tree type = TREE_TYPE (exp);
3223 tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
3224 CALL_EXPR_ARG (exp, 1), type);
3225 if (result)
3226 return expand_expr (result, target, mode, EXPAND_NORMAL);
3228 return NULL_RTX;
3231 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed;
3232 the caller should emit a normal call. Otherwise try to get the result
3233 in TARGET, if convenient (and in mode MODE if that's convenient). */
3235 static rtx
3236 expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
3238 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3240 tree type = TREE_TYPE (exp);
3241 tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
3242 CALL_EXPR_ARG (exp, 1), type);
3243 if (result)
3244 return expand_expr (result, target, mode, EXPAND_NORMAL);
3246 return NULL_RTX;
3249 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3250 bytes from the constant string DATA + OFFSET and return it as a target
3251 constant. */
3253 static rtx
3254 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3255 enum machine_mode mode)
3257 const char *str = (const char *) data;
3259 gcc_assert (offset >= 0
3260 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3261 <= strlen (str) + 1));
3263 return c_readstr (str + offset, mode);
3266 /* Expand a call EXP to the memcpy builtin.
3267 Return NULL_RTX if we failed; the caller should emit a normal call,
3268 otherwise try to get the result in TARGET, if convenient (and in
3269 mode MODE if that's convenient). */
3271 static rtx
3272 expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
3274 tree fndecl = get_callee_fndecl (exp);
3276 if (!validate_arglist (exp,
3277 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3278 return NULL_RTX;
3279 else
3281 tree dest = CALL_EXPR_ARG (exp, 0);
3282 tree src = CALL_EXPR_ARG (exp, 1);
3283 tree len = CALL_EXPR_ARG (exp, 2);
3284 const char *src_str;
3285 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3286 unsigned int dest_align
3287 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3288 rtx dest_mem, src_mem, dest_addr, len_rtx;
3289 tree result = fold_builtin_memory_op (dest, src, len,
3290 TREE_TYPE (TREE_TYPE (fndecl)),
3291 false, /*endp=*/0);
3292 HOST_WIDE_INT expected_size = -1;
3293 unsigned int expected_align = 0;
3295 if (result)
3297 while (TREE_CODE (result) == COMPOUND_EXPR)
3299 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3300 EXPAND_NORMAL);
3301 result = TREE_OPERAND (result, 1);
3303 return expand_expr (result, target, mode, EXPAND_NORMAL);
3306 /* If DEST is not a pointer type, call the normal function. */
3307 if (dest_align == 0)
3308 return NULL_RTX;
3310 /* If SRC is not a pointer type, don't do this
3311 operation in-line. */
3312 if (src_align == 0)
3313 return NULL_RTX;
3315 stringop_block_profile (exp, &expected_align, &expected_size);
3316 if (expected_align < dest_align)
3317 expected_align = dest_align;
3318 dest_mem = get_memory_rtx (dest, len);
3319 set_mem_align (dest_mem, dest_align);
3320 len_rtx = expand_normal (len);
3321 src_str = c_getstr (src);
3323 /* If SRC is a string constant and block move would be done
3324 by pieces, we can avoid loading the string from memory
3325 and only store the computed constants. */
3326 if (src_str
3327 && GET_CODE (len_rtx) == CONST_INT
3328 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3329 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3330 CONST_CAST (char *, src_str),
3331 dest_align, false))
3333 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3334 builtin_memcpy_read_str,
3335 CONST_CAST (char *, src_str),
3336 dest_align, false, 0);
3337 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3338 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3339 return dest_mem;
3342 src_mem = get_memory_rtx (src, len);
3343 set_mem_align (src_mem, src_align);
3345 /* Copy word part most expediently. */
3346 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3347 CALL_EXPR_TAILCALL (exp)
3348 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3349 expected_align, expected_size);
3351 if (dest_addr == 0)
3353 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3354 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3356 return dest_addr;
3360 /* Expand a call EXP to the mempcpy builtin.
3361 Return NULL_RTX if we failed; the caller should emit a normal call,
3362 otherwise try to get the result in TARGET, if convenient (and in
3363 mode MODE if that's convenient). If ENDP is 0 return the
3364 destination pointer, if ENDP is 1 return the end pointer ala
3365 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3366 stpcpy. */
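/* E.g. for a copy of LEN bytes into DEST, ENDP == 0 yields DEST, ENDP == 1
   yields DEST + LEN, and ENDP == 2 yields DEST + LEN - 1.  */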
3368 static rtx
3369 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3371 if (!validate_arglist (exp,
3372 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3373 return NULL_RTX;
3374 else
3376 tree dest = CALL_EXPR_ARG (exp, 0);
3377 tree src = CALL_EXPR_ARG (exp, 1);
3378 tree len = CALL_EXPR_ARG (exp, 2);
3379 return expand_builtin_mempcpy_args (dest, src, len,
3380 TREE_TYPE (exp),
3381 target, mode, /*endp=*/ 1);
3385 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3386 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3387 so that this can also be called without constructing an actual CALL_EXPR.
3388 TYPE is the return type of the call. The other arguments and return value
3389 are the same as for expand_builtin_mempcpy. */
3391 static rtx
3392 expand_builtin_mempcpy_args (tree dest, tree src, tree len, tree type,
3393 rtx target, enum machine_mode mode, int endp)
3395 /* If the return value is ignored, transform mempcpy into memcpy. */
3396 if (target == const0_rtx)
3398 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3400 if (!fn)
3401 return NULL_RTX;
3403 return expand_expr (build_call_expr (fn, 3, dest, src, len),
3404 target, mode, EXPAND_NORMAL);
3406 else
3408 const char *src_str;
3409 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3410 unsigned int dest_align
3411 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3412 rtx dest_mem, src_mem, len_rtx;
3413 tree result = fold_builtin_memory_op (dest, src, len, type, false, endp);
3415 if (result)
3417 while (TREE_CODE (result) == COMPOUND_EXPR)
3419 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3420 EXPAND_NORMAL);
3421 result = TREE_OPERAND (result, 1);
3423 return expand_expr (result, target, mode, EXPAND_NORMAL);
3426 /* If either SRC or DEST is not a pointer type, don't do this
3427 operation in-line. */
3428 if (dest_align == 0 || src_align == 0)
3429 return NULL_RTX;
3431 /* If LEN is not constant, call the normal function. */
3432 if (! host_integerp (len, 1))
3433 return NULL_RTX;
3435 len_rtx = expand_normal (len);
3436 src_str = c_getstr (src);
3438 /* If SRC is a string constant and block move would be done
3439 by pieces, we can avoid loading the string from memory
3440 and only store the computed constants. */
3441 if (src_str
3442 && GET_CODE (len_rtx) == CONST_INT
3443 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3444 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3445 CONST_CAST (char *, src_str),
3446 dest_align, false))
3448 dest_mem = get_memory_rtx (dest, len);
3449 set_mem_align (dest_mem, dest_align);
3450 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3451 builtin_memcpy_read_str,
3452 CONST_CAST (char *, src_str),
3453 dest_align, false, endp);
3454 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3455 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3456 return dest_mem;
3459 if (GET_CODE (len_rtx) == CONST_INT
3460 && can_move_by_pieces (INTVAL (len_rtx),
3461 MIN (dest_align, src_align)))
3463 dest_mem = get_memory_rtx (dest, len);
3464 set_mem_align (dest_mem, dest_align);
3465 src_mem = get_memory_rtx (src, len);
3466 set_mem_align (src_mem, src_align);
3467 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3468 MIN (dest_align, src_align), endp);
3469 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3470 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3471 return dest_mem;
3474 return NULL_RTX;
3478 /* Expand expression EXP, which is a call to the memmove builtin. Return
3479 NULL_RTX if we failed; the caller should emit a normal call. */
3481 static rtx
3482 expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
3484 if (!validate_arglist (exp,
3485 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3486 return NULL_RTX;
3487 else
3489 tree dest = CALL_EXPR_ARG (exp, 0);
3490 tree src = CALL_EXPR_ARG (exp, 1);
3491 tree len = CALL_EXPR_ARG (exp, 2);
3492 return expand_builtin_memmove_args (dest, src, len, TREE_TYPE (exp),
3493 target, mode, ignore);
3497 /* Helper function to do the actual work for expand_builtin_memmove. The
3498 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3499 so that this can also be called without constructing an actual CALL_EXPR.
3500 TYPE is the return type of the call. The other arguments and return value
3501 are the same as for expand_builtin_memmove. */
3503 static rtx
3504 expand_builtin_memmove_args (tree dest, tree src, tree len,
3505 tree type, rtx target, enum machine_mode mode,
3506 int ignore)
3508 tree result = fold_builtin_memory_op (dest, src, len, type, ignore, /*endp=*/3);
3510 if (result)
3512 STRIP_TYPE_NOPS (result);
3513 while (TREE_CODE (result) == COMPOUND_EXPR)
3515 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3516 EXPAND_NORMAL);
3517 result = TREE_OPERAND (result, 1);
3519 return expand_expr (result, target, mode, EXPAND_NORMAL);
3522 /* Otherwise, call the normal function. */
3523 return NULL_RTX;
3526 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3527 NULL_RTX if we failed; the caller should emit a normal call. */
3529 static rtx
3530 expand_builtin_bcopy (tree exp, int ignore)
3532 tree type = TREE_TYPE (exp);
3533 tree src, dest, size;
3535 if (!validate_arglist (exp,
3536 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3537 return NULL_RTX;
3539 src = CALL_EXPR_ARG (exp, 0);
3540 dest = CALL_EXPR_ARG (exp, 1);
3541 size = CALL_EXPR_ARG (exp, 2);
3543 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3544 This is done this way so that if it isn't expanded inline, we fall
3545 back to calling bcopy instead of memmove. */
3546 return expand_builtin_memmove_args (dest, src,
3547 fold_convert (sizetype, size),
3548 type, const0_rtx, VOIDmode,
3549 ignore);
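/* An illustrative sketch (not part of the original source) of the argument
   swap the bcopy expansion performs, written as plain C.  The helper name
   is made up.  */
#include <string.h>

static void
sketch_bcopy (const void *src, void *dst, int size)
{
  /* bcopy (src, dst, size) is expanded as memmove (dst, src, size),
     with the memmove result discarded.  */
  memmove (dst, src, (size_t) size);
}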
3552 #ifndef HAVE_movstr
3553 # define HAVE_movstr 0
3554 # define CODE_FOR_movstr CODE_FOR_nothing
3555 #endif
3557 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3558 we failed; the caller should emit a normal call, otherwise try to
3559 get the result in TARGET, if convenient. If ENDP is 0 return the
3560 destination pointer, if ENDP is 1 return the end pointer ala
3561 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3562 stpcpy. */
3564 static rtx
3565 expand_movstr (tree dest, tree src, rtx target, int endp)
3567 rtx end;
3568 rtx dest_mem;
3569 rtx src_mem;
3570 rtx insn;
3571 const struct insn_data * data;
3573 if (!HAVE_movstr)
3574 return NULL_RTX;
3576 dest_mem = get_memory_rtx (dest, NULL);
3577 src_mem = get_memory_rtx (src, NULL);
3578 if (!endp)
3580 target = force_reg (Pmode, XEXP (dest_mem, 0));
3581 dest_mem = replace_equiv_address (dest_mem, target);
3582 end = gen_reg_rtx (Pmode);
3584 else
3586 if (target == 0 || target == const0_rtx)
3588 end = gen_reg_rtx (Pmode);
3589 if (target == 0)
3590 target = end;
3592 else
3593 end = target;
3596 data = insn_data + CODE_FOR_movstr;
3598 if (data->operand[0].mode != VOIDmode)
3599 end = gen_lowpart (data->operand[0].mode, end);
3601 insn = data->genfun (end, dest_mem, src_mem);
3603 gcc_assert (insn);
3605 emit_insn (insn);
3607 /* movstr is supposed to set end to the address of the NUL
3608 terminator. If the caller requested a mempcpy-like return value,
3609 adjust it. */
3610 if (endp == 1 && target != const0_rtx)
3612 rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3613 emit_move_insn (target, force_operand (tem, NULL_RTX));
3616 return target;
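/* An illustrative sketch (not part of the original source) of the adjustment
   above: a movstr-style copy reports the address of the copied NUL, which is
   already the stpcpy result (ENDP 2); the mempcpy result (ENDP 1) is one byte
   further on.  The helper name is made up.  */
static char *
sketch_movstr_result (char *nul_address, int endp)
{
  return endp == 1 ? nul_address + 1 : nul_address;
}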
3619 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3620 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3621 try to get the result in TARGET, if convenient (and in mode MODE if that's
3622 convenient). */
3624 static rtx
3625 expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
3627 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3629 tree dest = CALL_EXPR_ARG (exp, 0);
3630 tree src = CALL_EXPR_ARG (exp, 1);
3631 return expand_builtin_strcpy_args (fndecl, dest, src, target, mode);
3633 return NULL_RTX;
3636 /* Helper function to do the actual work for expand_builtin_strcpy. The
3637 arguments to the builtin_strcpy call DEST and SRC are broken out
3638 so that this can also be called without constructing an actual CALL_EXPR.
3639 The other arguments and return value are the same as for
3640 expand_builtin_strcpy. */
3642 static rtx
3643 expand_builtin_strcpy_args (tree fndecl, tree dest, tree src,
3644 rtx target, enum machine_mode mode)
3646 tree result = fold_builtin_strcpy (fndecl, dest, src, 0);
3647 if (result)
3648 return expand_expr (result, target, mode, EXPAND_NORMAL);
3649 return expand_movstr (dest, src, target, /*endp=*/0);
3653 /* Expand a call EXP to the stpcpy builtin.
3654 Return NULL_RTX if we failed; the caller should emit a normal call,
3655 otherwise try to get the result in TARGET, if convenient (and in
3656 mode MODE if that's convenient). */
3658 static rtx
3659 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3661 tree dst, src;
3663 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3664 return NULL_RTX;
3666 dst = CALL_EXPR_ARG (exp, 0);
3667 src = CALL_EXPR_ARG (exp, 1);
3669 /* If return value is ignored, transform stpcpy into strcpy. */
3670 if (target == const0_rtx)
3672 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3673 if (!fn)
3674 return NULL_RTX;
3676 return expand_expr (build_call_expr (fn, 2, dst, src),
3677 target, mode, EXPAND_NORMAL);
3679 else
3681 tree len, lenp1;
3682 rtx ret;
3684 /* Ensure we get an actual string whose length can be evaluated at
3685 compile-time, not an expression containing a string. This is
3686 because the latter will potentially produce pessimized code
3687 when used to produce the return value. */
3688 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3689 return expand_movstr (dst, src, target, /*endp=*/2);
3691 lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3692 ret = expand_builtin_mempcpy_args (dst, src, lenp1, TREE_TYPE (exp),
3693 target, mode, /*endp=*/2);
3695 if (ret)
3696 return ret;
3698 if (TREE_CODE (len) == INTEGER_CST)
3700 rtx len_rtx = expand_normal (len);
3702 if (GET_CODE (len_rtx) == CONST_INT)
3704 ret = expand_builtin_strcpy_args (get_callee_fndecl (exp),
3705 dst, src, target, mode);
3707 if (ret)
3709 if (! target)
3711 if (mode != VOIDmode)
3712 target = gen_reg_rtx (mode);
3713 else
3714 target = gen_reg_rtx (GET_MODE (ret));
3716 if (GET_MODE (target) != GET_MODE (ret))
3717 ret = gen_lowpart (GET_MODE (target), ret);
3719 ret = plus_constant (ret, INTVAL (len_rtx));
3720 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3721 gcc_assert (ret);
3723 return target;
3728 return expand_movstr (dst, src, target, /*endp=*/2);
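/* An illustrative sketch (not part of the original source) of the identity
   the stpcpy expansion relies on when strlen (SRC) is known at compile time
   to be LEN:  stpcpy (dst, src) == mempcpy (dst, src, LEN + 1) - 1.  The
   helper name is made up.  */
#include <stddef.h>
#include <string.h>

static char *
sketch_stpcpy_as_mempcpy (char *dst, const char *src, size_t len)
{
  /* LEN is assumed to equal strlen (src).  memcpy returns DST, so
     DST + LEN is the copied NUL, i.e. mempcpy's result minus one.  */
  return (char *) memcpy (dst, src, len + 1) + len;
}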
3732 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3733 bytes from constant string DATA + OFFSET and return it as target
3734 constant. */
3736 static rtx
3737 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3738 enum machine_mode mode)
3740 const char *str = (const char *) data;
3742 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3743 return const0_rtx;
3745 return c_readstr (str + offset, mode);
3748 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3749 NULL_RTX if we failed; the caller should emit a normal call. */
3751 static rtx
3752 expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3754 tree fndecl = get_callee_fndecl (exp);
3756 if (validate_arglist (exp,
3757 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3759 tree dest = CALL_EXPR_ARG (exp, 0);
3760 tree src = CALL_EXPR_ARG (exp, 1);
3761 tree len = CALL_EXPR_ARG (exp, 2);
3762 tree slen = c_strlen (src, 1);
3763 tree result = fold_builtin_strncpy (fndecl, dest, src, len, slen);
3765 if (result)
3767 while (TREE_CODE (result) == COMPOUND_EXPR)
3769 expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3770 EXPAND_NORMAL);
3771 result = TREE_OPERAND (result, 1);
3773 return expand_expr (result, target, mode, EXPAND_NORMAL);
3776 /* We must be passed a constant len and src parameter. */
3777 if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3778 return NULL_RTX;
3780 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3782 /* We're required to pad with trailing zeros if the requested
3783 len is greater than strlen(s2)+1. In that case try to
3784 use store_by_pieces; if it fails, punt. */
3785 if (tree_int_cst_lt (slen, len))
3787 unsigned int dest_align
3788 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3789 const char *p = c_getstr (src);
3790 rtx dest_mem;
3792 if (!p || dest_align == 0 || !host_integerp (len, 1)
3793 || !can_store_by_pieces (tree_low_cst (len, 1),
3794 builtin_strncpy_read_str,
3795 CONST_CAST (char *, p),
3796 dest_align, false))
3797 return NULL_RTX;
3799 dest_mem = get_memory_rtx (dest, len);
3800 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3801 builtin_strncpy_read_str,
3802 CONST_CAST (char *, p), dest_align, false, 0);
3803 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3804 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3805 return dest_mem;
3808 return NULL_RTX;
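/* An illustrative sketch (not part of the original source) of the strncpy
   padding rule the code above must honour: when the requested length exceeds
   strlen (SRC) + 1, every remaining destination byte is zeroed.  The helper
   name is made up.  */
#include <stddef.h>
#include <string.h>

static void
sketch_strncpy_pads (char *dst, const char *src, size_t n)
{
  size_t len = strlen (src);

  if (len >= n)
    memcpy (dst, src, n);                      /* truncated, no NUL appended */
  else
    {
      memcpy (dst, src, len + 1);              /* the string plus its NUL */
      memset (dst + len + 1, 0, n - len - 1);  /* required zero padding */
    }
}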
3811 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3812 bytes from constant string DATA + OFFSET and return it as target
3813 constant. */
3815 static rtx
3816 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3817 enum machine_mode mode)
3819 const char *c = (const char *) data;
3820 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3822 memset (p, *c, GET_MODE_SIZE (mode));
3824 return c_readstr (p, mode);
3827 /* Callback routine for store_by_pieces. Return the RTL of a register
3828 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3829 char value given in the RTL register data. For example, if mode is
3830 4 bytes wide, return the RTL for 0x01010101*data. */
3832 static rtx
3833 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3834 enum machine_mode mode)
3836 rtx target, coeff;
3837 size_t size;
3838 char *p;
3840 size = GET_MODE_SIZE (mode);
3841 if (size == 1)
3842 return (rtx) data;
3844 p = XALLOCAVEC (char, size);
3845 memset (p, 1, size);
3846 coeff = c_readstr (p, mode);
3848 target = convert_to_mode (mode, (rtx) data, 1);
3849 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3850 return force_reg (mode, target);
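/* An illustrative sketch (not part of the original source) of the
   replication performed above, on a 32-bit value: multiplying an unsigned
   byte by 0x01010101 copies it into every byte of the word.  The helper
   name is made up.  */
#include <stdint.h>

static uint32_t
sketch_replicate_byte (unsigned char c)
{
  return (uint32_t) c * 0x01010101u;   /* e.g. 0xab becomes 0xabababab */
}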
3853 /* Expand expression EXP, which is a call to the memset builtin. Return
3854 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3855 try to get the result in TARGET, if convenient (and in mode MODE if that's
3856 convenient). */
3858 static rtx
3859 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3861 if (!validate_arglist (exp,
3862 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3863 return NULL_RTX;
3864 else
3866 tree dest = CALL_EXPR_ARG (exp, 0);
3867 tree val = CALL_EXPR_ARG (exp, 1);
3868 tree len = CALL_EXPR_ARG (exp, 2);
3869 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3873 /* Helper function to do the actual work for expand_builtin_memset. The
3874 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3875 so that this can also be called without constructing an actual CALL_EXPR.
3876 The other arguments and return value are the same as for
3877 expand_builtin_memset. */
3879 static rtx
3880 expand_builtin_memset_args (tree dest, tree val, tree len,
3881 rtx target, enum machine_mode mode, tree orig_exp)
3883 tree fndecl, fn;
3884 enum built_in_function fcode;
3885 char c;
3886 unsigned int dest_align;
3887 rtx dest_mem, dest_addr, len_rtx;
3888 HOST_WIDE_INT expected_size = -1;
3889 unsigned int expected_align = 0;
3891 dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3893 /* If DEST is not a pointer type, don't do this operation in-line. */
3894 if (dest_align == 0)
3895 return NULL_RTX;
3897 stringop_block_profile (orig_exp, &expected_align, &expected_size);
3898 if (expected_align < dest_align)
3899 expected_align = dest_align;
3901 /* If the LEN parameter is zero, return DEST. */
3902 if (integer_zerop (len))
3904 /* Evaluate and ignore VAL in case it has side-effects. */
3905 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3906 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3909 /* Stabilize the arguments in case we fail. */
3910 dest = builtin_save_expr (dest);
3911 val = builtin_save_expr (val);
3912 len = builtin_save_expr (len);
3914 len_rtx = expand_normal (len);
3915 dest_mem = get_memory_rtx (dest, len);
3917 if (TREE_CODE (val) != INTEGER_CST)
3919 rtx val_rtx;
3921 val_rtx = expand_normal (val);
3922 val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3923 val_rtx, 0);
3925 /* Assume that we can memset by pieces if we can store
3926 the coefficients by pieces (in the required modes).
3927 We can't pass builtin_memset_gen_str as that emits RTL. */
3928 c = 1;
3929 if (host_integerp (len, 1)
3930 && can_store_by_pieces (tree_low_cst (len, 1),
3931 builtin_memset_read_str, &c, dest_align,
3932 true))
3934 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3935 val_rtx);
3936 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3937 builtin_memset_gen_str, val_rtx, dest_align,
3938 true, 0);
3940 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3941 dest_align, expected_align,
3942 expected_size))
3943 goto do_libcall;
3945 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3946 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3947 return dest_mem;
3950 if (target_char_cast (val, &c))
3951 goto do_libcall;
3953 if (c)
3955 if (host_integerp (len, 1)
3956 && can_store_by_pieces (tree_low_cst (len, 1),
3957 builtin_memset_read_str, &c, dest_align,
3958 true))
3959 store_by_pieces (dest_mem, tree_low_cst (len, 1),
3960 builtin_memset_read_str, &c, dest_align, true, 0);
3961 else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3962 dest_align, expected_align,
3963 expected_size))
3964 goto do_libcall;
3966 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3967 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3968 return dest_mem;
3971 set_mem_align (dest_mem, dest_align);
3972 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3973 CALL_EXPR_TAILCALL (orig_exp)
3974 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3975 expected_align, expected_size);
3977 if (dest_addr == 0)
3979 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3980 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3983 return dest_addr;
3985 do_libcall:
3986 fndecl = get_callee_fndecl (orig_exp);
3987 fcode = DECL_FUNCTION_CODE (fndecl);
3988 if (fcode == BUILT_IN_MEMSET)
3989 fn = build_call_expr (fndecl, 3, dest, val, len);
3990 else if (fcode == BUILT_IN_BZERO)
3991 fn = build_call_expr (fndecl, 2, dest, len);
3992 else
3993 gcc_unreachable ();
3994 if (TREE_CODE (fn) == CALL_EXPR)
3995 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3996 return expand_call (fn, target, target == const0_rtx);
3999 /* Expand expression EXP, which is a call to the bzero builtin. Return
4000 NULL_RTX if we failed; the caller should emit a normal call. */
4002 static rtx
4003 expand_builtin_bzero (tree exp)
4005 tree dest, size;
4007 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4008 return NULL_RTX;
4010 dest = CALL_EXPR_ARG (exp, 0);
4011 size = CALL_EXPR_ARG (exp, 1);
4013 /* New argument list transforming bzero(ptr x, int y) to
4014 memset(ptr x, int 0, size_t y). This is done this way
4015 so that if it isn't expanded inline, we fall back to
4016 calling bzero instead of memset. */
4018 return expand_builtin_memset_args (dest, integer_zero_node,
4019 fold_convert (sizetype, size),
4020 const0_rtx, VOIDmode, exp);
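/* An illustrative sketch (not part of the original source) of the rewrite
   performed by the bzero expansion above, in plain C.  The helper name is
   made up.  */
#include <stddef.h>
#include <string.h>

static void
sketch_bzero (void *dst, int size)
{
  memset (dst, 0, (size_t) size);   /* the memset result is discarded */
}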
4023 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed; the
4024 caller should emit a normal call, otherwise try to get the result
4025 in TARGET, if convenient (and in mode MODE if that's convenient). */
4027 static rtx
4028 expand_builtin_memchr (tree exp, rtx target, enum machine_mode mode)
4030 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
4031 INTEGER_TYPE, VOID_TYPE))
4033 tree type = TREE_TYPE (exp);
4034 tree result = fold_builtin_memchr (CALL_EXPR_ARG (exp, 0),
4035 CALL_EXPR_ARG (exp, 1),
4036 CALL_EXPR_ARG (exp, 2), type);
4037 if (result)
4038 return expand_expr (result, target, mode, EXPAND_NORMAL);
4040 return NULL_RTX;
4043 /* Expand expression EXP, which is a call to the memcmp built-in function.
4044 Return NULL_RTX if we failed and the
4045 caller should emit a normal call, otherwise try to get the result in
4046 TARGET, if convenient (and in mode MODE, if that's convenient). */
4048 static rtx
4049 expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
4051 if (!validate_arglist (exp,
4052 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4053 return NULL_RTX;
4054 else
4056 tree result = fold_builtin_memcmp (CALL_EXPR_ARG (exp, 0),
4057 CALL_EXPR_ARG (exp, 1),
4058 CALL_EXPR_ARG (exp, 2));
4059 if (result)
4060 return expand_expr (result, target, mode, EXPAND_NORMAL);
4063 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4065 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4066 rtx result;
4067 rtx insn;
4068 tree arg1 = CALL_EXPR_ARG (exp, 0);
4069 tree arg2 = CALL_EXPR_ARG (exp, 1);
4070 tree len = CALL_EXPR_ARG (exp, 2);
4072 int arg1_align
4073 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4074 int arg2_align
4075 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4076 enum machine_mode insn_mode;
4078 #ifdef HAVE_cmpmemsi
4079 if (HAVE_cmpmemsi)
4080 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
4081 else
4082 #endif
4083 #ifdef HAVE_cmpstrnsi
4084 if (HAVE_cmpstrnsi)
4085 insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4086 else
4087 #endif
4088 return NULL_RTX;
4090 /* If we don't have POINTER_TYPE, call the function. */
4091 if (arg1_align == 0 || arg2_align == 0)
4092 return NULL_RTX;
4094 /* Make a place to write the result of the instruction. */
4095 result = target;
4096 if (! (result != 0
4097 && REG_P (result) && GET_MODE (result) == insn_mode
4098 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4099 result = gen_reg_rtx (insn_mode);
4101 arg1_rtx = get_memory_rtx (arg1, len);
4102 arg2_rtx = get_memory_rtx (arg2, len);
4103 arg3_rtx = expand_normal (len);
4105 /* Set MEM_SIZE as appropriate. */
4106 if (GET_CODE (arg3_rtx) == CONST_INT)
4108 set_mem_size (arg1_rtx, arg3_rtx);
4109 set_mem_size (arg2_rtx, arg3_rtx);
4112 #ifdef HAVE_cmpmemsi
4113 if (HAVE_cmpmemsi)
4114 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4115 GEN_INT (MIN (arg1_align, arg2_align)));
4116 else
4117 #endif
4118 #ifdef HAVE_cmpstrnsi
4119 if (HAVE_cmpstrnsi)
4120 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4121 GEN_INT (MIN (arg1_align, arg2_align)));
4122 else
4123 #endif
4124 gcc_unreachable ();
4126 if (insn)
4127 emit_insn (insn);
4128 else
4129 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4130 TYPE_MODE (integer_type_node), 3,
4131 XEXP (arg1_rtx, 0), Pmode,
4132 XEXP (arg2_rtx, 0), Pmode,
4133 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4134 TYPE_UNSIGNED (sizetype)),
4135 TYPE_MODE (sizetype));
4137 /* Return the value in the proper mode for this function. */
4138 mode = TYPE_MODE (TREE_TYPE (exp));
4139 if (GET_MODE (result) == mode)
4140 return result;
4141 else if (target != 0)
4143 convert_move (target, result, 0);
4144 return target;
4146 else
4147 return convert_to_mode (mode, result, 0);
4149 #endif
4151 return NULL_RTX;
4154 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4155 if we failed; the caller should emit a normal call, otherwise try to get
4156 the result in TARGET, if convenient. */
4158 static rtx
4159 expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4161 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4162 return NULL_RTX;
4163 else
4165 tree result = fold_builtin_strcmp (CALL_EXPR_ARG (exp, 0),
4166 CALL_EXPR_ARG (exp, 1));
4167 if (result)
4168 return expand_expr (result, target, mode, EXPAND_NORMAL);
4171 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4172 if (cmpstr_optab[SImode] != CODE_FOR_nothing
4173 || cmpstrn_optab[SImode] != CODE_FOR_nothing)
4175 rtx arg1_rtx, arg2_rtx;
4176 rtx result, insn = NULL_RTX;
4177 tree fndecl, fn;
4178 tree arg1 = CALL_EXPR_ARG (exp, 0);
4179 tree arg2 = CALL_EXPR_ARG (exp, 1);
4181 int arg1_align
4182 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4183 int arg2_align
4184 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4186 /* If we don't have POINTER_TYPE, call the function. */
4187 if (arg1_align == 0 || arg2_align == 0)
4188 return NULL_RTX;
4190 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4191 arg1 = builtin_save_expr (arg1);
4192 arg2 = builtin_save_expr (arg2);
4194 arg1_rtx = get_memory_rtx (arg1, NULL);
4195 arg2_rtx = get_memory_rtx (arg2, NULL);
4197 #ifdef HAVE_cmpstrsi
4198 /* Try to call cmpstrsi. */
4199 if (HAVE_cmpstrsi)
4201 enum machine_mode insn_mode
4202 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4204 /* Make a place to write the result of the instruction. */
4205 result = target;
4206 if (! (result != 0
4207 && REG_P (result) && GET_MODE (result) == insn_mode
4208 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4209 result = gen_reg_rtx (insn_mode);
4211 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4212 GEN_INT (MIN (arg1_align, arg2_align)));
4214 #endif
4215 #ifdef HAVE_cmpstrnsi
4216 /* Try to determine at least one length and call cmpstrnsi. */
4217 if (!insn && HAVE_cmpstrnsi)
4219 tree len;
4220 rtx arg3_rtx;
4222 enum machine_mode insn_mode
4223 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4224 tree len1 = c_strlen (arg1, 1);
4225 tree len2 = c_strlen (arg2, 1);
4227 if (len1)
4228 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4229 if (len2)
4230 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4232 /* If we don't have a constant length for the first, use the length
4233 of the second, if we know it. We don't require a constant for
4234 this case; some cost analysis could be done if both are available
4235 but neither is constant. For now, assume they're equally cheap,
4236 unless one has side effects. If both strings have constant lengths,
4237 use the smaller. */
4239 if (!len1)
4240 len = len2;
4241 else if (!len2)
4242 len = len1;
4243 else if (TREE_SIDE_EFFECTS (len1))
4244 len = len2;
4245 else if (TREE_SIDE_EFFECTS (len2))
4246 len = len1;
4247 else if (TREE_CODE (len1) != INTEGER_CST)
4248 len = len2;
4249 else if (TREE_CODE (len2) != INTEGER_CST)
4250 len = len1;
4251 else if (tree_int_cst_lt (len1, len2))
4252 len = len1;
4253 else
4254 len = len2;
4256 /* If both arguments have side effects, we cannot optimize. */
4257 if (!len || TREE_SIDE_EFFECTS (len))
4258 goto do_libcall;
4260 arg3_rtx = expand_normal (len);
4262 /* Make a place to write the result of the instruction. */
4263 result = target;
4264 if (! (result != 0
4265 && REG_P (result) && GET_MODE (result) == insn_mode
4266 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4267 result = gen_reg_rtx (insn_mode);
4269 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4270 GEN_INT (MIN (arg1_align, arg2_align)));
4272 #endif
4274 if (insn)
4276 emit_insn (insn);
4278 /* Return the value in the proper mode for this function. */
4279 mode = TYPE_MODE (TREE_TYPE (exp));
4280 if (GET_MODE (result) == mode)
4281 return result;
4282 if (target == 0)
4283 return convert_to_mode (mode, result, 0);
4284 convert_move (target, result, 0);
4285 return target;
4288 /* Expand the library call ourselves using a stabilized argument
4289 list to avoid re-evaluating the function's arguments twice. */
4290 #ifdef HAVE_cmpstrnsi
4291 do_libcall:
4292 #endif
4293 fndecl = get_callee_fndecl (exp);
4294 fn = build_call_expr (fndecl, 2, arg1, arg2);
4295 if (TREE_CODE (fn) == CALL_EXPR)
4296 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4297 return expand_call (fn, target, target == const0_rtx);
4299 #endif
4300 return NULL_RTX;
4303 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4304 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4305 the result in TARGET, if convenient. */
4307 static rtx
4308 expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4310 if (!validate_arglist (exp,
4311 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4312 return NULL_RTX;
4313 else
4315 tree result = fold_builtin_strncmp (CALL_EXPR_ARG (exp, 0),
4316 CALL_EXPR_ARG (exp, 1),
4317 CALL_EXPR_ARG (exp, 2));
4318 if (result)
4319 return expand_expr (result, target, mode, EXPAND_NORMAL);
4322 /* If c_strlen can determine an expression for one of the string
4323 lengths, and it doesn't have side effects, then emit cmpstrnsi
4324 using length MIN(strlen(string)+1, arg3). */
4325 #ifdef HAVE_cmpstrnsi
4326 if (HAVE_cmpstrnsi)
4328 tree len, len1, len2;
4329 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4330 rtx result, insn;
4331 tree fndecl, fn;
4332 tree arg1 = CALL_EXPR_ARG (exp, 0);
4333 tree arg2 = CALL_EXPR_ARG (exp, 1);
4334 tree arg3 = CALL_EXPR_ARG (exp, 2);
4336 int arg1_align
4337 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4338 int arg2_align
4339 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
4340 enum machine_mode insn_mode
4341 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4343 len1 = c_strlen (arg1, 1);
4344 len2 = c_strlen (arg2, 1);
4346 if (len1)
4347 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4348 if (len2)
4349 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4351 /* If we don't have a constant length for the first, use the length
4352 of the second, if we know it. We don't require a constant for
4353 this case; some cost analysis could be done if both are available
4354 but neither is constant. For now, assume they're equally cheap,
4355 unless one has side effects. If both strings have constant lengths,
4356 use the smaller. */
4358 if (!len1)
4359 len = len2;
4360 else if (!len2)
4361 len = len1;
4362 else if (TREE_SIDE_EFFECTS (len1))
4363 len = len2;
4364 else if (TREE_SIDE_EFFECTS (len2))
4365 len = len1;
4366 else if (TREE_CODE (len1) != INTEGER_CST)
4367 len = len2;
4368 else if (TREE_CODE (len2) != INTEGER_CST)
4369 len = len1;
4370 else if (tree_int_cst_lt (len1, len2))
4371 len = len1;
4372 else
4373 len = len2;
4375 /* If both arguments have side effects, we cannot optimize. */
4376 if (!len || TREE_SIDE_EFFECTS (len))
4377 return NULL_RTX;
4379 /* The actual new length parameter is MIN(len,arg3). */
4380 len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
4381 fold_convert (TREE_TYPE (len), arg3));
4383 /* If we don't have POINTER_TYPE, call the function. */
4384 if (arg1_align == 0 || arg2_align == 0)
4385 return NULL_RTX;
4387 /* Make a place to write the result of the instruction. */
4388 result = target;
4389 if (! (result != 0
4390 && REG_P (result) && GET_MODE (result) == insn_mode
4391 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4392 result = gen_reg_rtx (insn_mode);
4394 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4395 arg1 = builtin_save_expr (arg1);
4396 arg2 = builtin_save_expr (arg2);
4397 len = builtin_save_expr (len);
4399 arg1_rtx = get_memory_rtx (arg1, len);
4400 arg2_rtx = get_memory_rtx (arg2, len);
4401 arg3_rtx = expand_normal (len);
4402 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4403 GEN_INT (MIN (arg1_align, arg2_align)));
4404 if (insn)
4406 emit_insn (insn);
4408 /* Return the value in the proper mode for this function. */
4409 mode = TYPE_MODE (TREE_TYPE (exp));
4410 if (GET_MODE (result) == mode)
4411 return result;
4412 if (target == 0)
4413 return convert_to_mode (mode, result, 0);
4414 convert_move (target, result, 0);
4415 return target;
4418 /* Expand the library call ourselves using a stabilized argument
4419 list to avoid re-evaluating the function's arguments twice. */
4420 fndecl = get_callee_fndecl (exp);
4421 fn = build_call_expr (fndecl, 3, arg1, arg2, len);
4422 if (TREE_CODE (fn) == CALL_EXPR)
4423 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4424 return expand_call (fn, target, target == const0_rtx);
4426 #endif
4427 return NULL_RTX;
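/* An illustrative sketch (not part of the original source) of the length
   bound computed above before emitting cmpstrnsi: if one string has a known
   length L, at most MIN (L + 1, N) bytes can affect the result, because the
   comparison stops at the first NUL.  The helper name is made up.  */
#include <stddef.h>

static size_t
sketch_strncmp_bound (size_t known_strlen, size_t n)
{
  size_t limit = known_strlen + 1;   /* include the terminating NUL */
  return limit < n ? limit : n;      /* MIN (strlen (s) + 1, n) */
}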
4430 /* Expand expression EXP, which is a call to the strcat builtin.
4431 Return NULL_RTX if we failed; the caller should emit a normal call,
4432 otherwise try to get the result in TARGET, if convenient. */
4434 static rtx
4435 expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4437 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4438 return NULL_RTX;
4439 else
4441 tree dst = CALL_EXPR_ARG (exp, 0);
4442 tree src = CALL_EXPR_ARG (exp, 1);
4443 const char *p = c_getstr (src);
4445 /* If the string length is zero, return the dst parameter. */
4446 if (p && *p == '\0')
4447 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4449 if (!optimize_size)
4451 /* See if we can store by pieces into (dst + strlen(dst)). */
4452 tree newsrc, newdst,
4453 strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
4454 rtx insns;
4456 /* Stabilize the argument list. */
4457 newsrc = builtin_save_expr (src);
4458 dst = builtin_save_expr (dst);
4460 start_sequence ();
4462 /* Create strlen (dst). */
4463 newdst = build_call_expr (strlen_fn, 1, dst);
4464 /* Create (dst p+ strlen (dst)). */
4466 newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
4467 newdst = builtin_save_expr (newdst);
4469 if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
4471 end_sequence (); /* Stop sequence. */
4472 return NULL_RTX;
4475 /* Output the entire sequence. */
4476 insns = get_insns ();
4477 end_sequence ();
4478 emit_insn (insns);
4480 return expand_expr (dst, target, mode, EXPAND_NORMAL);
4483 return NULL_RTX;
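/* An illustrative sketch (not part of the original source) of the
   transformation the strcat expansion above performs when it can inline the
   copy.  The helper name is made up.  */
#include <string.h>

static char *
sketch_strcat (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);   /* copy into (dst p+ strlen (dst)) */
  return dst;                         /* strcat returns its first argument */
}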
4487 /* Expand expression EXP, which is a call to the strncat builtin.
4488 Return NULL_RTX if we failed; the caller should emit a normal call,
4489 otherwise try to get the result in TARGET, if convenient. */
4491 static rtx
4492 expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4494 if (validate_arglist (exp,
4495 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4497 tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
4498 CALL_EXPR_ARG (exp, 1),
4499 CALL_EXPR_ARG (exp, 2));
4500 if (result)
4501 return expand_expr (result, target, mode, EXPAND_NORMAL);
4503 return NULL_RTX;
4506 /* Expand expression EXP, which is a call to the strspn builtin.
4507 Return NULL_RTX if we failed; the caller should emit a normal call,
4508 otherwise try to get the result in TARGET, if convenient. */
4510 static rtx
4511 expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4513 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4515 tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
4516 CALL_EXPR_ARG (exp, 1));
4517 if (result)
4518 return expand_expr (result, target, mode, EXPAND_NORMAL);
4520 return NULL_RTX;
4523 /* Expand expression EXP, which is a call to the strcspn builtin.
4524 Return NULL_RTX if we failed; the caller should emit a normal call,
4525 otherwise try to get the result in TARGET, if convenient. */
4527 static rtx
4528 expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4530 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4532 tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
4533 CALL_EXPR_ARG (exp, 1));
4534 if (result)
4535 return expand_expr (result, target, mode, EXPAND_NORMAL);
4537 return NULL_RTX;
4540 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4541 if that's convenient. */
4543 rtx
4544 expand_builtin_saveregs (void)
4546 rtx val, seq;
4548 /* Don't do __builtin_saveregs more than once in a function.
4549 Save the result of the first call and reuse it. */
4550 if (saveregs_value != 0)
4551 return saveregs_value;
4553 /* When this function is called, it means that registers must be
4554 saved on entry to this function. So we migrate the call to the
4555 first insn of this function. */
4557 start_sequence ();
4559 /* Do whatever the machine needs done in this case. */
4560 val = targetm.calls.expand_builtin_saveregs ();
4562 seq = get_insns ();
4563 end_sequence ();
4565 saveregs_value = val;
4567 /* Put the insns after the NOTE that starts the function. If this
4568 is inside a start_sequence, make the outer-level insn chain current, so
4569 the code is placed at the start of the function. */
4570 push_topmost_sequence ();
4571 emit_insn_after (seq, entry_of_function ());
4572 pop_topmost_sequence ();
4574 return val;
4577 /* __builtin_args_info (N) returns word N of the arg space info
4578 for the current function. The number and meanings of words
4579 are controlled by the definition of CUMULATIVE_ARGS.
4581 static rtx
4582 expand_builtin_args_info (tree exp)
4584 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4585 int *word_ptr = (int *) &crtl->args.info;
4587 gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4589 if (call_expr_nargs (exp) != 0)
4591 if (!host_integerp (CALL_EXPR_ARG (exp, 0), 0))
4592 error ("argument of %<__builtin_args_info%> must be constant");
4593 else
4595 HOST_WIDE_INT wordnum = tree_low_cst (CALL_EXPR_ARG (exp, 0), 0);
4597 if (wordnum < 0 || wordnum >= nwords)
4598 error ("argument of %<__builtin_args_info%> out of range");
4599 else
4600 return GEN_INT (word_ptr[wordnum]);
4603 else
4604 error ("missing argument in %<__builtin_args_info%>");
4606 return const0_rtx;
4609 /* Expand a call to __builtin_next_arg. */
4611 static rtx
4612 expand_builtin_next_arg (void)
4614 /* Checking arguments is already done in fold_builtin_next_arg
4615 that must be called before this function. */
4616 return expand_binop (ptr_mode, add_optab,
4617 crtl->args.internal_arg_pointer,
4618 crtl->args.arg_offset_rtx,
4619 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4622 /* Make it easier for the backends by protecting the valist argument
4623 from multiple evaluations. */
4625 static tree
4626 stabilize_va_list (tree valist, int needs_lvalue)
4628 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4630 if (TREE_SIDE_EFFECTS (valist))
4631 valist = save_expr (valist);
4633 /* For this case, the backends will be expecting a pointer to
4634 TREE_TYPE (va_list_type_node), but it's possible we've
4635 actually been given an array (an actual va_list_type_node).
4636 So fix it. */
4637 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4639 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4640 valist = build_fold_addr_expr_with_type (valist, p1);
4643 else
4645 tree pt;
4647 if (! needs_lvalue)
4649 if (! TREE_SIDE_EFFECTS (valist))
4650 return valist;
4652 pt = build_pointer_type (va_list_type_node);
4653 valist = fold_build1 (ADDR_EXPR, pt, valist);
4654 TREE_SIDE_EFFECTS (valist) = 1;
4657 if (TREE_SIDE_EFFECTS (valist))
4658 valist = save_expr (valist);
4659 valist = build_fold_indirect_ref (valist);
4662 return valist;
4665 /* The "standard" definition of va_list is void*. */
4667 tree
4668 std_build_builtin_va_list (void)
4670 return ptr_type_node;
4673 /* The "standard" implementation of va_start: just assign `nextarg' to
4674 the variable. */
4676 void
4677 std_expand_builtin_va_start (tree valist, rtx nextarg)
4679 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4680 convert_move (va_r, nextarg, 0);
4683 /* Expand EXP, a call to __builtin_va_start. */
4685 static rtx
4686 expand_builtin_va_start (tree exp)
4688 rtx nextarg;
4689 tree valist;
4691 if (call_expr_nargs (exp) < 2)
4693 error ("too few arguments to function %<va_start%>");
4694 return const0_rtx;
4697 if (fold_builtin_next_arg (exp, true))
4698 return const0_rtx;
4700 nextarg = expand_builtin_next_arg ();
4701 valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);
4703 if (targetm.expand_builtin_va_start)
4704 targetm.expand_builtin_va_start (valist, nextarg);
4705 else
4706 std_expand_builtin_va_start (valist, nextarg);
4708 return const0_rtx;
4711 /* The "standard" implementation of va_arg: read the value from the
4712 current (padded) address and increment by the (padded) size. */
4714 tree
4715 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4717 tree addr, t, type_size, rounded_size, valist_tmp;
4718 unsigned HOST_WIDE_INT align, boundary;
4719 bool indirect;
4721 #ifdef ARGS_GROW_DOWNWARD
4722 /* All of the alignment and movement below is for args-grow-up machines.
4723 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4724 implement their own specialized gimplify_va_arg_expr routines. */
4725 gcc_unreachable ();
4726 #endif
4728 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4729 if (indirect)
4730 type = build_pointer_type (type);
4732 align = PARM_BOUNDARY / BITS_PER_UNIT;
4733 boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4735 /* Hoist the valist value into a temporary for the moment. */
4736 valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4738 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4739 requires greater alignment, we must perform dynamic alignment. */
4740 if (boundary > align
4741 && !integer_zerop (TYPE_SIZE (type)))
4743 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4744 fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
4745 valist_tmp, size_int (boundary - 1)));
4746 gimplify_and_add (t, pre_p);
4748 t = fold_convert (sizetype, valist_tmp);
4749 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4750 fold_convert (TREE_TYPE (valist),
4751 fold_build2 (BIT_AND_EXPR, sizetype, t,
4752 size_int (-boundary))));
4753 gimplify_and_add (t, pre_p);
4755 else
4756 boundary = align;
4758 /* If the actual alignment is less than the alignment of the type,
4759 adjust the type accordingly so that we don't assume strict alignment
4760 when dereferencing the pointer. */
4761 boundary *= BITS_PER_UNIT;
4762 if (boundary < TYPE_ALIGN (type))
4764 type = build_variant_type_copy (type);
4765 TYPE_ALIGN (type) = boundary;
4768 /* Compute the rounded size of the type. */
4769 type_size = size_in_bytes (type);
4770 rounded_size = round_up (type_size, align);
4772 /* Reduce rounded_size so it's sharable with the postqueue. */
4773 gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4775 /* Get AP. */
4776 addr = valist_tmp;
4777 if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4779 /* Small args are padded downward. */
4780 t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4781 t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4782 size_binop (MINUS_EXPR, rounded_size, type_size));
4783 addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
4786 /* Compute new value for AP. */
4787 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
4788 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4789 gimplify_and_add (t, pre_p);
4791 addr = fold_convert (build_pointer_type (type), addr);
4793 if (indirect)
4794 addr = build_va_arg_indirect_ref (addr);
4796 return build_va_arg_indirect_ref (addr);
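/* An illustrative sketch (not part of the original source) of the pointer
   arithmetic built by the trees above, for an args-grow-up target with
   PAD_VARARGS_DOWN disabled and power-of-two alignments.  The helper name
   is made up.  */
#include <stddef.h>
#include <stdint.h>

static uintptr_t
sketch_va_arg_step (uintptr_t *ap, size_t size, size_t align, size_t boundary)
{
  uintptr_t addr, rounded;

  /* Dynamic alignment when the argument needs more than PARM_BOUNDARY.  */
  addr = (*ap + boundary - 1) & ~((uintptr_t) boundary - 1);

  /* The next argument starts SIZE rounded up to ALIGN bytes further on.  */
  rounded = ((uintptr_t) size + align - 1) & ~((uintptr_t) align - 1);
  *ap = addr + rounded;

  return addr;   /* where the current argument is read from */
}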
4799 /* Build an indirect-ref expression over the given TREE, which represents a
4800 piece of a va_arg() expansion. */
4801 tree
4802 build_va_arg_indirect_ref (tree addr)
4804 addr = build_fold_indirect_ref (addr);
4806 if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF. */
4807 mf_mark (addr);
4809 return addr;
4812 /* Return a dummy expression of type TYPE in order to keep going after an
4813 error. */
4815 static tree
4816 dummy_object (tree type)
4818 tree t = build_int_cst (build_pointer_type (type), 0);
4819 return build1 (INDIRECT_REF, type, t);
4822 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4823 builtin function, but a very special sort of operator. */
4825 enum gimplify_status
4826 gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4828 tree promoted_type, want_va_type, have_va_type;
4829 tree valist = TREE_OPERAND (*expr_p, 0);
4830 tree type = TREE_TYPE (*expr_p);
4831 tree t;
4833 /* Verify that valist is of the proper type. */
4834 want_va_type = va_list_type_node;
4835 have_va_type = TREE_TYPE (valist);
4837 if (have_va_type == error_mark_node)
4838 return GS_ERROR;
4840 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4842 /* If va_list is an array type, the argument may have decayed
4843 to a pointer type, e.g. by being passed to another function.
4844 In that case, unwrap both types so that we can compare the
4845 underlying records. */
4846 if (TREE_CODE (have_va_type) == ARRAY_TYPE
4847 || POINTER_TYPE_P (have_va_type))
4849 want_va_type = TREE_TYPE (want_va_type);
4850 have_va_type = TREE_TYPE (have_va_type);
4854 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4856 error ("first argument to %<va_arg%> not of type %<va_list%>");
4857 return GS_ERROR;
4860 /* Generate a diagnostic for requesting data of a type that cannot
4861 be passed through `...' due to type promotion at the call site. */
4862 else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4863 != type)
4865 static bool gave_help;
4867 /* Unfortunately, this is merely undefined, rather than a constraint
4868 violation, so we cannot make this an error. If this call is never
4869 executed, the program is still strictly conforming. */
4870 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4871 type, promoted_type);
4872 if (! gave_help)
4874 gave_help = true;
4875 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4876 promoted_type, type);
4879 /* We can, however, treat "undefined" any way we please.
4880 Call abort to encourage the user to fix the program. */
4881 inform ("if this code is reached, the program will abort");
4882 t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
4883 append_to_statement_list (t, pre_p);
4885 /* This is dead code, but go ahead and finish so that the
4886 mode of the result comes out right. */
4887 *expr_p = dummy_object (type);
4888 return GS_ALL_DONE;
4890 else
4892 /* Make it easier for the backends by protecting the valist argument
4893 from multiple evaluations. */
4894 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4896 /* For this case, the backends will be expecting a pointer to
4897 TREE_TYPE (va_list_type_node), but it's possible we've
4898 actually been given an array (an actual va_list_type_node).
4899 So fix it. */
4900 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4902 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4903 valist = build_fold_addr_expr_with_type (valist, p1);
4905 gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4907 else
4908 gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4910 if (!targetm.gimplify_va_arg_expr)
4911 /* FIXME: Once most targets are converted we should merely
4912 assert this is non-null. */
4913 return GS_ALL_DONE;
4915 *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4916 return GS_OK;
4920 /* Expand EXP, a call to __builtin_va_end. */
4922 static rtx
4923 expand_builtin_va_end (tree exp)
4925 tree valist = CALL_EXPR_ARG (exp, 0);
4927 /* Evaluate for side effects, if needed. I hate macros that don't
4928 do that. */
4929 if (TREE_SIDE_EFFECTS (valist))
4930 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4932 return const0_rtx;
4935 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4936 builtin rather than just as an assignment in stdarg.h because of the
4937 nastiness of array-type va_list types. */
4939 static rtx
4940 expand_builtin_va_copy (tree exp)
4942 tree dst, src, t;
4944 dst = CALL_EXPR_ARG (exp, 0);
4945 src = CALL_EXPR_ARG (exp, 1);
4947 dst = stabilize_va_list (dst, 1);
4948 src = stabilize_va_list (src, 0);
4950 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4952 t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4953 TREE_SIDE_EFFECTS (t) = 1;
4954 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4956 else
4958 rtx dstb, srcb, size;
4960 /* Evaluate to pointers. */
4961 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4962 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4963 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4964 VOIDmode, EXPAND_NORMAL);
4966 dstb = convert_memory_address (Pmode, dstb);
4967 srcb = convert_memory_address (Pmode, srcb);
4969 /* "Dereference" to BLKmode memories. */
4970 dstb = gen_rtx_MEM (BLKmode, dstb);
4971 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4972 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4973 srcb = gen_rtx_MEM (BLKmode, srcb);
4974 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4975 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4977 /* Copy. */
4978 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4981 return const0_rtx;
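/* An illustrative sketch (not part of the original source) of the array-type
   case handled above: when va_list is an array type (as on x86-64), va_copy
   must copy the whole underlying object, which is what the emit_block_move
   above performs; otherwise it is a plain assignment.  The helper name is
   made up.  */
#include <stdarg.h>
#include <string.h>

static void
sketch_va_copy_block (va_list *dst, const va_list *src)
{
  memcpy (dst, src, sizeof (va_list));   /* block copy of the va_list object */
}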
4984 /* Expand a call to one of the builtin functions __builtin_frame_address or
4985 __builtin_return_address. */
4987 static rtx
4988 expand_builtin_frame_address (tree fndecl, tree exp)
4990 /* The argument must be a nonnegative integer constant.
4991 It counts the number of frames to scan up the stack.
4992 The value is the return address saved in that frame. */
4993 if (call_expr_nargs (exp) == 0)
4994 /* Warning about missing arg was already issued. */
4995 return const0_rtx;
4996 else if (! host_integerp (CALL_EXPR_ARG (exp, 0), 1))
4998 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4999 error ("invalid argument to %<__builtin_frame_address%>");
5000 else
5001 error ("invalid argument to %<__builtin_return_address%>");
5002 return const0_rtx;
5004 else
5006 rtx tem
5007 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
5008 tree_low_cst (CALL_EXPR_ARG (exp, 0), 1));
5010 /* Some ports cannot access arbitrary stack frames. */
5011 if (tem == NULL)
5013 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5014 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5015 else
5016 warning (0, "unsupported argument to %<__builtin_return_address%>");
5017 return const0_rtx;
5020 /* For __builtin_frame_address, return what we've got. */
5021 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5022 return tem;
5024 if (!REG_P (tem)
5025 && ! CONSTANT_P (tem))
5026 tem = copy_to_mode_reg (Pmode, tem);
5027 return tem;
5031 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5032 we failed and the caller should emit a normal call, otherwise try to get
5033 the result in TARGET, if convenient. */
5035 static rtx
5036 expand_builtin_alloca (tree exp, rtx target)
5038 rtx op0;
5039 rtx result;
5041 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5042 should always expand to function calls. These can be intercepted
5043 in libmudflap. */
5044 if (flag_mudflap)
5045 return NULL_RTX;
5047 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5048 return NULL_RTX;
5050 /* Compute the argument. */
5051 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5053 /* Allocate the desired space. */
5054 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
5055 result = convert_memory_address (ptr_mode, result);
5057 return result;
5060 /* Expand a call to a bswap builtin with argument ARG0. MODE
5061 is the mode to expand with. */
5063 static rtx
5064 expand_builtin_bswap (tree exp, rtx target, rtx subtarget)
5066 enum machine_mode mode;
5067 tree arg;
5068 rtx op0;
5070 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5071 return NULL_RTX;
5073 arg = CALL_EXPR_ARG (exp, 0);
5074 mode = TYPE_MODE (TREE_TYPE (arg));
5075 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5077 target = expand_unop (mode, bswap_optab, op0, target, 1);
5079 gcc_assert (target);
5081 return convert_to_mode (mode, target, 0);
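/* An illustrative sketch (not part of the original source) of the operation
   bswap_optab implements, written out for a 32-bit value.  The helper name
   is made up.  */
#include <stdint.h>

static uint32_t
sketch_bswap32 (uint32_t x)
{
  return ((x & 0x000000ffu) << 24)
         | ((x & 0x0000ff00u) << 8)
         | ((x & 0x00ff0000u) >> 8)
         | ((x & 0xff000000u) >> 24);
}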
5084 /* Expand a call to a unary builtin in EXP.
5085 Return NULL_RTX if a normal call should be emitted rather than expanding the
5086 function in-line. If convenient, the result should be placed in TARGET.
5087 SUBTARGET may be used as the target for computing one of EXP's operands. */
5089 static rtx
5090 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
5091 rtx subtarget, optab op_optab)
5093 rtx op0;
5095 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5096 return NULL_RTX;
5098 /* Compute the argument. */
5099 op0 = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
5100 VOIDmode, EXPAND_NORMAL);
5101 /* Compute op, into TARGET if possible.
5102 Set TARGET to wherever the result comes back. */
5103 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5104 op_optab, op0, target, 1);
5105 gcc_assert (target);
5107 return convert_to_mode (target_mode, target, 0);
5110 /* If the string passed to fputs is a constant and is one character
5111 long, we attempt to transform this call into __builtin_fputc(). */
5113 static rtx
5114 expand_builtin_fputs (tree exp, rtx target, bool unlocked)
5116 /* Verify the arguments in the original call. */
5117 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5119 tree result = fold_builtin_fputs (CALL_EXPR_ARG (exp, 0),
5120 CALL_EXPR_ARG (exp, 1),
5121 (target == const0_rtx),
5122 unlocked, NULL_TREE);
5123 if (result)
5124 return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
5126 return NULL_RTX;
5129 /* Expand a call to __builtin_expect. We just return our argument
5130 as the builtin_expect semantics should already have been handled by
5131 the tree branch prediction pass. */
5133 static rtx
5134 expand_builtin_expect (tree exp, rtx target)
5136 tree arg, c;
5138 if (call_expr_nargs (exp) < 2)
5139 return const0_rtx;
5140 arg = CALL_EXPR_ARG (exp, 0);
5141 c = CALL_EXPR_ARG (exp, 1);
5143 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5144 /* When guessing was done, the hints should be already stripped away. */
5145 gcc_assert (!flag_guess_branch_prob
5146 || optimize == 0 || errorcount || sorrycount);
5147 return target;
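/* An illustrative sketch (not part of the original source): by this point
   __builtin_expect contributes no code of its own; the hint was consumed by
   the tree-level branch predictor and only the first argument's value is
   left.  The helper name is made up.  */
static long
sketch_expect (long value, long expected)
{
  (void) expected;   /* the hint produces no RTL here */
  return value;      /* __builtin_expect (value, expected) evaluates to value */
}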
5150 void
5151 expand_builtin_trap (void)
5153 #ifdef HAVE_trap
5154 if (HAVE_trap)
5155 emit_insn (gen_trap ());
5156 else
5157 #endif
5158 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
5159 emit_barrier ();
5162 /* Expand EXP, a call to fabs, fabsf or fabsl.
5163 Return NULL_RTX if a normal call should be emitted rather than expanding
5164 the function inline. If convenient, the result should be placed
5165 in TARGET. SUBTARGET may be used as the target for computing
5166 the operand. */
5168 static rtx
5169 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5171 enum machine_mode mode;
5172 tree arg;
5173 rtx op0;
5175 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5176 return NULL_RTX;
5178 arg = CALL_EXPR_ARG (exp, 0);
5179 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5180 mode = TYPE_MODE (TREE_TYPE (arg));
5181 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5182 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5185 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5186 Return NULL if a normal call should be emitted rather than expanding the
5187 function inline. If convenient, the result should be placed in TARGET.
5188 SUBTARGET may be used as the target for computing the operand. */
5190 static rtx
5191 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5193 rtx op0, op1;
5194 tree arg;
5196 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5197 return NULL_RTX;
5199 arg = CALL_EXPR_ARG (exp, 0);
5200 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5202 arg = CALL_EXPR_ARG (exp, 1);
5203 op1 = expand_normal (arg);
5205 return expand_copysign (op0, op1, target);
5208 /* Create a new constant string literal and return a char* pointer to it.
5209 The STRING_CST value is the LEN characters at STR. */
5210 tree
5211 build_string_literal (int len, const char *str)
5213 tree t, elem, index, type;
5215 t = build_string (len, str);
5216 elem = build_type_variant (char_type_node, 1, 0);
5217 index = build_index_type (size_int (len - 1));
5218 type = build_array_type (elem, index);
5219 TREE_TYPE (t) = type;
5220 TREE_CONSTANT (t) = 1;
5221 TREE_READONLY (t) = 1;
5222 TREE_STATIC (t) = 1;
5224 type = build_pointer_type (elem);
5225 t = build1 (ADDR_EXPR, type,
5226 build4 (ARRAY_REF, elem,
5227 t, integer_zero_node, NULL_TREE, NULL_TREE));
5228 return t;
5231 /* Expand EXP, a call to printf or printf_unlocked.
5232 Return NULL_RTX if a normal call should be emitted rather than transforming
5233 the function inline. If convenient, the result should be placed in
5234 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5235 call. */
5236 static rtx
5237 expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
5238 bool unlocked)
5240 /* If we're using an unlocked function, assume the other unlocked
5241 functions exist explicitly. */
5242 tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
5243 : implicit_built_in_decls[BUILT_IN_PUTCHAR];
5244 tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
5245 : implicit_built_in_decls[BUILT_IN_PUTS];
5246 const char *fmt_str;
5247 tree fn = 0;
5248 tree fmt, arg;
5249 int nargs = call_expr_nargs (exp);
5251 /* If the return value is used, don't do the transformation. */
5252 if (target != const0_rtx)
5253 return NULL_RTX;
5255 /* Verify the required arguments in the original call. */
5256 if (nargs == 0)
5257 return NULL_RTX;
5258 fmt = CALL_EXPR_ARG (exp, 0);
5259 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5260 return NULL_RTX;
5262 /* Check whether the format is a literal string constant. */
5263 fmt_str = c_getstr (fmt);
5264 if (fmt_str == NULL)
5265 return NULL_RTX;
5267 if (!init_target_chars ())
5268 return NULL_RTX;
5270 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5271 if (strcmp (fmt_str, target_percent_s_newline) == 0)
5273 if ((nargs != 2)
5274 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 1))))
5275 return NULL_RTX;
5276 if (fn_puts)
5277 fn = build_call_expr (fn_puts, 1, CALL_EXPR_ARG (exp, 1));
5279 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5280 else if (strcmp (fmt_str, target_percent_c) == 0)
5282 if ((nargs != 2)
5283 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1))) != INTEGER_TYPE)
5284 return NULL_RTX;
5285 if (fn_putchar)
5286 fn = build_call_expr (fn_putchar, 1, CALL_EXPR_ARG (exp, 1));
5288 else
5290 /* We can't handle anything else with % args or %% ... yet. */
5291 if (strchr (fmt_str, target_percent))
5292 return NULL_RTX;
5294 if (nargs > 1)
5295 return NULL_RTX;
5297 /* If the format specifier was "", printf does nothing. */
5298 if (fmt_str[0] == '\0')
5299 return const0_rtx;
5300 /* If the format specifier has length of 1, call putchar. */
5301 if (fmt_str[1] == '\0')
5303 /* Given printf ("c"), where c is any single character,
5304 convert "c"[0] to an int and pass that to the replacement
5305 function. */
5306 arg = build_int_cst (NULL_TREE, fmt_str[0]);
5307 if (fn_putchar)
5308 fn = build_call_expr (fn_putchar, 1, arg);
5310 else
5312 /* If the format specifier was "string\n", call puts("string"). */
5313 size_t len = strlen (fmt_str);
5314 if ((unsigned char) fmt_str[len - 1] == target_newline)
5316 /* Create a NUL-terminated string that's one char shorter
5317 than the original, stripping off the trailing '\n'. */
5318 char *newstr = XALLOCAVEC (char, len);
5319 memcpy (newstr, fmt_str, len - 1);
5320 newstr[len - 1] = 0;
5321 arg = build_string_literal (len, newstr);
5322 if (fn_puts)
5323 fn = build_call_expr (fn_puts, 1, arg);
5325 else
5326 /* We'd like to arrange to call fputs(string,stdout) here,
5327 but we need stdout and don't have a way to get it yet. */
5328 return NULL_RTX;
5332 if (!fn)
5333 return NULL_RTX;
5334 if (TREE_CODE (fn) == CALL_EXPR)
5335 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5336 return expand_expr (fn, target, mode, EXPAND_NORMAL);
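/* Illustrative summary of the transformations implemented above,
   assuming a literal format string and an unused printf result (this
   is a sketch, not code from this file):

       printf ("%s\n", s);   =>  puts (s);
       printf ("%c", c);     =>  putchar (c);
       printf ("x");         =>  putchar ('x');
       printf ("hi\n");      =>  puts ("hi");
       printf ("");          =>  (no code emitted, result 0)

   Any other format containing '%' is left as a regular printf call.  */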
5339 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5340 Return NULL_RTX if a normal call should be emitted rather than transforming
5341 the function inline. If convenient, the result should be placed in
5342 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5343 call. */
5344 static rtx
5345 expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5346 bool unlocked)
5348 /* If we're using an unlocked function, assume the other unlocked
5349 functions exist explicitly. */
5350 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5351 : implicit_built_in_decls[BUILT_IN_FPUTC];
5352 tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5353 : implicit_built_in_decls[BUILT_IN_FPUTS];
5354 const char *fmt_str;
5355 tree fn = 0;
5356 tree fmt, fp, arg;
5357 int nargs = call_expr_nargs (exp);
5359 /* If the return value is used, don't do the transformation. */
5360 if (target != const0_rtx)
5361 return NULL_RTX;
5363 /* Verify the required arguments in the original call. */
5364 if (nargs < 2)
5365 return NULL_RTX;
5366 fp = CALL_EXPR_ARG (exp, 0);
5367 if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5368 return NULL_RTX;
5369 fmt = CALL_EXPR_ARG (exp, 1);
5370 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5371 return NULL_RTX;
5373 /* Check whether the format is a literal string constant. */
5374 fmt_str = c_getstr (fmt);
5375 if (fmt_str == NULL)
5376 return NULL_RTX;
5378 if (!init_target_chars ())
5379 return NULL_RTX;
5381 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5382 if (strcmp (fmt_str, target_percent_s) == 0)
5384 if ((nargs != 3)
5385 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp, 2))))
5386 return NULL_RTX;
5387 arg = CALL_EXPR_ARG (exp, 2);
5388 if (fn_fputs)
5389 fn = build_call_expr (fn_fputs, 2, arg, fp);
5391 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5392 else if (strcmp (fmt_str, target_percent_c) == 0)
5394 if ((nargs != 3)
5395 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2))) != INTEGER_TYPE)
5396 return NULL_RTX;
5397 arg = CALL_EXPR_ARG (exp, 2);
5398 if (fn_fputc)
5399 fn = build_call_expr (fn_fputc, 2, arg, fp);
5401 else
5403 /* We can't handle anything else with % args or %% ... yet. */
5404 if (strchr (fmt_str, target_percent))
5405 return NULL_RTX;
5407 if (nargs > 2)
5408 return NULL_RTX;
5410 /* If the format specifier was "", fprintf does nothing. */
5411 if (fmt_str[0] == '\0')
5413 /* Evaluate and ignore FILE* argument for side-effects. */
5414 expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5415 return const0_rtx;
5418 /* When "string" doesn't contain %, replace all cases of
5419 fprintf(stream,string) with fputs(string,stream). The fputs
5420 builtin will take care of special cases like length == 1. */
5421 if (fn_fputs)
5422 fn = build_call_expr (fn_fputs, 2, fmt, fp);
5425 if (!fn)
5426 return NULL_RTX;
5427 if (TREE_CODE (fn) == CALL_EXPR)
5428 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5429 return expand_expr (fn, target, mode, EXPAND_NORMAL);
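/* Illustrative summary of the fprintf transformations above, again
   assuming a literal format string and an unused result (a sketch):

       fprintf (fp, "%s", s);   =>  fputs (s, fp);
       fprintf (fp, "%c", c);   =>  fputc (c, fp);
       fprintf (fp, "text");    =>  fputs ("text", fp);
       fprintf (fp, "");        =>  (fp evaluated for side effects only)

   Formats containing any other '%' directive are not touched.  */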
5432 /* Expand a call EXP to sprintf. Return NULL_RTX if
5433 a normal call should be emitted rather than expanding the function
5434 inline. If convenient, the result should be placed in TARGET with
5435 mode MODE. */
5437 static rtx
5438 expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5440 tree dest, fmt;
5441 const char *fmt_str;
5442 int nargs = call_expr_nargs (exp);
5444 /* Verify the required arguments in the original call. */
5445 if (nargs < 2)
5446 return NULL_RTX;
5447 dest = CALL_EXPR_ARG (exp, 0);
5448 if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5449 return NULL_RTX;
5450 fmt = CALL_EXPR_ARG (exp, 1);
5451 if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5452 return NULL_RTX;
5454 /* Check whether the format is a literal string constant. */
5455 fmt_str = c_getstr (fmt);
5456 if (fmt_str == NULL)
5457 return NULL_RTX;
5459 if (!init_target_chars ())
5460 return NULL_RTX;
5462 /* If the format doesn't contain % args or %%, use strcpy. */
5463 if (strchr (fmt_str, target_percent) == 0)
5465 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5466 tree exp;
5468 if ((nargs > 2) || ! fn)
5469 return NULL_RTX;
5470 expand_expr (build_call_expr (fn, 2, dest, fmt),
5471 const0_rtx, VOIDmode, EXPAND_NORMAL);
5472 if (target == const0_rtx)
5473 return const0_rtx;
5474 exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5475 return expand_expr (exp, target, mode, EXPAND_NORMAL);
5477 /* If the format is "%s", use strcpy; if the result is used we also need a constant length for the argument. */
5478 else if (strcmp (fmt_str, target_percent_s) == 0)
5480 tree fn, arg, len;
5481 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5483 if (! fn)
5484 return NULL_RTX;
5485 if (nargs != 3)
5486 return NULL_RTX;
5487 arg = CALL_EXPR_ARG (exp, 2);
5488 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5489 return NULL_RTX;
5491 if (target != const0_rtx)
5493 len = c_strlen (arg, 1);
5494 if (! len || TREE_CODE (len) != INTEGER_CST)
5495 return NULL_RTX;
5497 else
5498 len = NULL_TREE;
5500 expand_expr (build_call_expr (fn, 2, dest, arg),
5501 const0_rtx, VOIDmode, EXPAND_NORMAL);
5503 if (target == const0_rtx)
5504 return const0_rtx;
5505 return expand_expr (len, target, mode, EXPAND_NORMAL);
5508 return NULL_RTX;
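/* Illustrative summary of the sprintf transformations above (a
   sketch, assuming a literal format string):

       sprintf (buf, "abc");      =>  strcpy (buf, "abc"), with the
                                      constant 3 as the result;
       sprintf (buf, "%s", str);  =>  strcpy (buf, str), with
                                      strlen (str) as the result only
                                      when it is a compile-time
                                      constant; otherwise the call is
                                      left alone if its value is used.  */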
5511 /* Expand a call to either the entry or exit function profiler. */
5513 static rtx
5514 expand_builtin_profile_func (bool exitp)
5516 rtx this, which;
5518 this = DECL_RTL (current_function_decl);
5519 gcc_assert (MEM_P (this));
5520 this = XEXP (this, 0);
5522 if (exitp)
5523 which = profile_function_exit_libfunc;
5524 else
5525 which = profile_function_entry_libfunc;
5527 emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5528 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5529 0),
5530 Pmode);
5532 return const0_rtx;
5535 /* Expand a call to __builtin___clear_cache. */
5537 static rtx
5538 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
5540 #ifndef HAVE_clear_cache
5541 #ifdef CLEAR_INSN_CACHE
5542 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5543 does something. Just do the default expansion to a call to
5544 __clear_cache(). */
5545 return NULL_RTX;
5546 #else
5547 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5548 does nothing. There is no need to call it. Do nothing. */
5549 return const0_rtx;
5550 #endif /* CLEAR_INSN_CACHE */
5551 #else
5552 /* We have a "clear_cache" insn, and it will handle everything. */
5553 tree begin, end;
5554 rtx begin_rtx, end_rtx;
5555 enum insn_code icode;
5557 /* We must not expand to a library call. If we did, any
5558 fallback library function in libgcc that might contain a call to
5559 __builtin___clear_cache() would recurse infinitely. */
5560 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5562 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5563 return const0_rtx;
5566 if (HAVE_clear_cache)
5568 icode = CODE_FOR_clear_cache;
5570 begin = CALL_EXPR_ARG (exp, 0);
5571 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5572 begin_rtx = convert_memory_address (Pmode, begin_rtx);
5573 if (!insn_data[icode].operand[0].predicate (begin_rtx, Pmode))
5574 begin_rtx = copy_to_mode_reg (Pmode, begin_rtx);
5576 end = CALL_EXPR_ARG (exp, 1);
5577 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5578 end_rtx = convert_memory_address (Pmode, end_rtx);
5579 if (!insn_data[icode].operand[1].predicate (end_rtx, Pmode))
5580 end_rtx = copy_to_mode_reg (Pmode, end_rtx);
5582 emit_insn (gen_clear_cache (begin_rtx, end_rtx));
5584 return const0_rtx;
5585 #endif /* HAVE_clear_cache */
5588 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5590 static rtx
5591 round_trampoline_addr (rtx tramp)
5593 rtx temp, addend, mask;
5595 /* If we don't need too much alignment, we'll have been guaranteed
5596 proper alignment by get_trampoline_type. */
5597 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5598 return tramp;
5600 /* Round address up to desired boundary. */
5601 temp = gen_reg_rtx (Pmode);
5602 addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5603 mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5605 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5606 temp, 0, OPTAB_LIB_WIDEN);
5607 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5608 temp, 0, OPTAB_LIB_WIDEN);
5610 return tramp;
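/* Worked example of the rounding above (illustrative, assuming a
   hypothetical TRAMPOLINE_ALIGNMENT of 64 bits): ADDEND is 7 and MASK
   is -8, so a trampoline address of 0x1003 becomes
   (0x1003 + 7) & -8 = 0x1008, the next 8-byte boundary.  */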
5613 static rtx
5614 expand_builtin_init_trampoline (tree exp)
5616 tree t_tramp, t_func, t_chain;
5617 rtx r_tramp, r_func, r_chain;
5618 #ifdef TRAMPOLINE_TEMPLATE
5619 rtx blktramp;
5620 #endif
5622 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5623 POINTER_TYPE, VOID_TYPE))
5624 return NULL_RTX;
5626 t_tramp = CALL_EXPR_ARG (exp, 0);
5627 t_func = CALL_EXPR_ARG (exp, 1);
5628 t_chain = CALL_EXPR_ARG (exp, 2);
5630 r_tramp = expand_normal (t_tramp);
5631 r_func = expand_normal (t_func);
5632 r_chain = expand_normal (t_chain);
5634 /* Generate insns to initialize the trampoline. */
5635 r_tramp = round_trampoline_addr (r_tramp);
5636 #ifdef TRAMPOLINE_TEMPLATE
5637 blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5638 set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5639 emit_block_move (blktramp, assemble_trampoline_template (),
5640 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5641 #endif
5642 trampolines_created = 1;
5643 INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5645 return const0_rtx;
5648 static rtx
5649 expand_builtin_adjust_trampoline (tree exp)
5651 rtx tramp;
5653 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5654 return NULL_RTX;
5656 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5657 tramp = round_trampoline_addr (tramp);
5658 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5659 TRAMPOLINE_ADJUST_ADDRESS (tramp);
5660 #endif
5662 return tramp;
5665 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5666 function. The function first checks whether the back end provides
5667 an insn to implement signbit for the respective mode. If not, it
5668 checks whether the floating point format of the value is such that
5669 the sign bit can be extracted. If that is not the case, the
5670 function returns NULL_RTX to indicate that a normal call should be
5671 emitted rather than expanding the function in-line. EXP is the
5672 expression that is a call to the builtin function; if convenient,
5673 the result should be placed in TARGET. */
5674 static rtx
5675 expand_builtin_signbit (tree exp, rtx target)
5677 const struct real_format *fmt;
5678 enum machine_mode fmode, imode, rmode;
5679 HOST_WIDE_INT hi, lo;
5680 tree arg;
5681 int word, bitpos;
5682 enum insn_code icode;
5683 rtx temp;
5685 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5686 return NULL_RTX;
5688 arg = CALL_EXPR_ARG (exp, 0);
5689 fmode = TYPE_MODE (TREE_TYPE (arg));
5690 rmode = TYPE_MODE (TREE_TYPE (exp));
5691 fmt = REAL_MODE_FORMAT (fmode);
5693 arg = builtin_save_expr (arg);
5695 /* Expand the argument yielding a RTX expression. */
5696 temp = expand_normal (arg);
5698 /* Check if the back end provides an insn that handles signbit for the
5699 argument's mode. */
5700 icode = signbit_optab->handlers [(int) fmode].insn_code;
5701 if (icode != CODE_FOR_nothing)
5703 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5704 emit_unop_insn (icode, target, temp, UNKNOWN);
5705 return target;
5708 /* For floating point formats without a sign bit, implement signbit
5709 as "ARG < 0.0". */
5710 bitpos = fmt->signbit_ro;
5711 if (bitpos < 0)
5713 /* But we can't do this if the format supports signed zero. */
5714 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5715 return NULL_RTX;
5717 arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5718 build_real (TREE_TYPE (arg), dconst0));
5719 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5722 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5724 imode = int_mode_for_mode (fmode);
5725 if (imode == BLKmode)
5726 return NULL_RTX;
5727 temp = gen_lowpart (imode, temp);
5729 else
5731 imode = word_mode;
5732 /* Handle targets with different FP word orders. */
5733 if (FLOAT_WORDS_BIG_ENDIAN)
5734 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5735 else
5736 word = bitpos / BITS_PER_WORD;
5737 temp = operand_subword_force (temp, word, fmode);
5738 bitpos = bitpos % BITS_PER_WORD;
5741 /* Force the intermediate word_mode (or narrower) result into a
5742 register. This avoids attempting to create paradoxical SUBREGs
5743 of floating point modes below. */
5744 temp = force_reg (imode, temp);
5746 /* If the bitpos is within the "result mode" lowpart, the operation
5747 can be implemented with a single bitwise AND. Otherwise, we need
5748 a right shift and an AND. */
5750 if (bitpos < GET_MODE_BITSIZE (rmode))
5752 if (bitpos < HOST_BITS_PER_WIDE_INT)
5754 hi = 0;
5755 lo = (HOST_WIDE_INT) 1 << bitpos;
5757 else
5759 hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5760 lo = 0;
5763 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5764 temp = gen_lowpart (rmode, temp);
5765 temp = expand_binop (rmode, and_optab, temp,
5766 immed_double_const (lo, hi, rmode),
5767 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5769 else
5771 /* Perform a logical right shift to place the signbit in the least
5772 significant bit, then truncate the result to the desired mode
5773 and mask just this bit. */
5774 temp = expand_shift (RSHIFT_EXPR, imode, temp,
5775 build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5776 temp = gen_lowpart (rmode, temp);
5777 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5778 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5781 return temp;
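/* Illustrative sketch of the bit extraction above (not code from this
   file): for IEEE single precision signbit_ro is 31, so with a 32-bit
   result mode the expansion amounts to

       bits   = (the float argument viewed as a 32-bit integer);
       result = bits & 0x80000000;   nonzero exactly when the sign bit is set

   and when the sign bit does not fall within the low part of the
   result mode, a logical right shift by BITPOS followed by an AND
   with 1 is emitted instead.  */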
5784 /* Expand fork or exec calls. TARGET is the desired target of the
5785 call. EXP is the call. FN is the
5786 identifier of the actual function. IGNORE is nonzero if the
5787 value is to be ignored. */
5789 static rtx
5790 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5792 tree id, decl;
5793 tree call;
5795 /* If we are not profiling, just call the function. */
5796 if (!profile_arc_flag)
5797 return NULL_RTX;
5799 /* Otherwise call the wrapper. This should be equivalent for the rest of
5800 the compiler, so the code does not diverge, and the wrapper may run the
5801 code necessary for keeping the profiling sane. */
5803 switch (DECL_FUNCTION_CODE (fn))
5805 case BUILT_IN_FORK:
5806 id = get_identifier ("__gcov_fork");
5807 break;
5809 case BUILT_IN_EXECL:
5810 id = get_identifier ("__gcov_execl");
5811 break;
5813 case BUILT_IN_EXECV:
5814 id = get_identifier ("__gcov_execv");
5815 break;
5817 case BUILT_IN_EXECLP:
5818 id = get_identifier ("__gcov_execlp");
5819 break;
5821 case BUILT_IN_EXECLE:
5822 id = get_identifier ("__gcov_execle");
5823 break;
5825 case BUILT_IN_EXECVP:
5826 id = get_identifier ("__gcov_execvp");
5827 break;
5829 case BUILT_IN_EXECVE:
5830 id = get_identifier ("__gcov_execve");
5831 break;
5833 default:
5834 gcc_unreachable ();
5837 decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5838 DECL_EXTERNAL (decl) = 1;
5839 TREE_PUBLIC (decl) = 1;
5840 DECL_ARTIFICIAL (decl) = 1;
5841 TREE_NOTHROW (decl) = 1;
5842 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5843 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5844 call = rewrite_call_expr (exp, 0, decl, 0);
5845 return expand_call (call, target, ignore);
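/* Illustrative effect of the rewrite above (a sketch): when compiling
   with -fprofile-arcs, a call such as fork () is expanded as a call
   to __gcov_fork () with the same arguments, so the profiling runtime
   can keep the coverage counters sane across the call.  */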
5850 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5851 the pointer in these functions is void*, the tree optimizers may remove
5852 casts. The mode computed in expand_builtin isn't reliable either, due
5853 to __sync_bool_compare_and_swap.
5855 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5856 group of builtins. This gives us log2 of the mode size. */
5858 static inline enum machine_mode
5859 get_builtin_sync_mode (int fcode_diff)
5861 /* The size is not negotiable, so ask not to get BLKmode in return
5862 if the target indicates that a smaller size would be better. */
5863 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
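/* Illustrative mapping for the computation above (a sketch): the _1,
   _2, _4, _8 and _16 builtin variants give FCODE_DIFF values 0
   through 4, requesting 8-, 16-, 32-, 64- and 128-bit integer modes,
   i.e. typically QImode, HImode, SImode, DImode and TImode.  */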
5866 /* Expand the memory expression LOC and return the appropriate memory operand
5867 for the builtin_sync operations. */
5869 static rtx
5870 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5872 rtx addr, mem;
5874 addr = expand_expr (loc, NULL_RTX, Pmode, EXPAND_SUM);
5876 /* Note that we explicitly do not want any alias information for this
5877 memory, so that we kill all other live memories. Otherwise we don't
5878 satisfy the full barrier semantics of the intrinsic. */
5879 mem = validize_mem (gen_rtx_MEM (mode, addr));
5881 set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5882 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5883 MEM_VOLATILE_P (mem) = 1;
5885 return mem;
5888 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5889 EXP is the CALL_EXPR. CODE is the rtx code
5890 that corresponds to the arithmetic or logical operation from the name;
5891 an exception here is that NOT actually means NAND. TARGET is an optional
5892 place for us to store the results; AFTER is true if this is the
5893 xxx_and_fetch form. IGNORE is true if we don't actually care about
5894 the result of the operation at all. */
5896 static rtx
5897 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5898 enum rtx_code code, bool after,
5899 rtx target, bool ignore)
5901 rtx val, mem;
5902 enum machine_mode old_mode;
5904 /* Expand the operands. */
5905 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5907 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5908 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5909 of CONST_INTs, where we know the old_mode only from the call argument. */
5910 old_mode = GET_MODE (val);
5911 if (old_mode == VOIDmode)
5912 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5913 val = convert_modes (mode, old_mode, val, 1);
5915 if (ignore)
5916 return expand_sync_operation (mem, val, code);
5917 else
5918 return expand_sync_fetch_operation (mem, val, code, after, target);
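/* Illustrative semantics of the two forms handled above (a sketch,
   assuming int-sized operands):

       old_val = __sync_fetch_and_add (&x, n);    AFTER is false; the
                                                  value of x before the add
       new_val = __sync_add_and_fetch (&x, n);    AFTER is true; the
                                                  value of x after the add

   The same pattern applies to the sub, or, and, xor and nand
   variants, with NOT standing in for NAND as noted above.  */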
5921 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5922 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5923 true if this is the boolean form. TARGET is a place for us to store the
5924 results; this is NOT optional if IS_BOOL is true. */
5926 static rtx
5927 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5928 bool is_bool, rtx target)
5930 rtx old_val, new_val, mem;
5931 enum machine_mode old_mode;
5933 /* Expand the operands. */
5934 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5937 old_val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX,
5938 mode, EXPAND_NORMAL);
5939 /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. Take care
5940 of CONST_INTs, where we know the old_mode only from the call argument. */
5941 old_mode = GET_MODE (old_val);
5942 if (old_mode == VOIDmode)
5943 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5944 old_val = convert_modes (mode, old_mode, old_val, 1);
5946 new_val = expand_expr (CALL_EXPR_ARG (exp, 2), NULL_RTX,
5947 mode, EXPAND_NORMAL);
5948 /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. Take care
5949 of CONST_INTs, where we know the old_mode only from the call argument. */
5950 old_mode = GET_MODE (new_val);
5951 if (old_mode == VOIDmode)
5952 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 2)));
5953 new_val = convert_modes (mode, old_mode, new_val, 1);
5955 if (is_bool)
5956 return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5957 else
5958 return expand_val_compare_and_swap (mem, old_val, new_val, target);
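/* Illustrative semantics of the two compare-and-swap forms above (a
   sketch, assuming int-sized operands):

       old  = __sync_val_compare_and_swap (&x, expected, desired);
              stores DESIRED into x only if x equals EXPECTED and
              always returns the prior contents of x;
       done = __sync_bool_compare_and_swap (&x, expected, desired);
              performs the same conditional store but returns nonzero
              exactly when the store took place.  */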
5961 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5962 general form is actually an atomic exchange, and some targets only
5963 support a reduced form with the second argument being a constant 1.
5964 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5965 the results. */
5967 static rtx
5968 expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
5969 rtx target)
5971 rtx val, mem;
5972 enum machine_mode old_mode;
5974 /* Expand the operands. */
5975 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5976 val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL_RTX, mode, EXPAND_NORMAL);
5977 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5978 of CONST_INTs, where we know the old_mode only from the call argument. */
5979 old_mode = GET_MODE (val);
5980 if (old_mode == VOIDmode)
5981 old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
5982 val = convert_modes (mode, old_mode, val, 1);
5984 return expand_sync_lock_test_and_set (mem, val, target);
5987 /* Expand the __sync_synchronize intrinsic. */
5989 static void
5990 expand_builtin_synchronize (void)
5992 tree x;
5994 #ifdef HAVE_memory_barrier
5995 if (HAVE_memory_barrier)
5997 emit_insn (gen_memory_barrier ());
5998 return;
6000 #endif
6002 if (synchronize_libfunc != NULL_RTX)
6004 emit_library_call (synchronize_libfunc, LCT_NORMAL, VOIDmode, 0);
6005 return;
6008 /* If no explicit memory barrier instruction is available, create an
6009 empty asm stmt with a memory clobber. */
6010 x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
6011 tree_cons (NULL, build_string (6, "memory"), NULL));
6012 ASM_VOLATILE_P (x) = 1;
6013 expand_asm_expr (x);
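/* The fallback above is roughly equivalent to the source-level

       __asm__ __volatile__ ("" : : : "memory");

   i.e. an empty volatile asm with a "memory" clobber, which acts as a
   compiler-level barrier only and emits no hardware fence.  */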
6016 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6018 static void
6019 expand_builtin_lock_release (enum machine_mode mode, tree exp)
6021 enum insn_code icode;
6022 rtx mem, insn;
6023 rtx val = const0_rtx;
6025 /* Expand the operands. */
6026 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6028 /* If there is an explicit operation in the md file, use it. */
6029 icode = sync_lock_release[mode];
6030 if (icode != CODE_FOR_nothing)
6032 if (!insn_data[icode].operand[1].predicate (val, mode))
6033 val = force_reg (mode, val);
6035 insn = GEN_FCN (icode) (mem, val);
6036 if (insn)
6038 emit_insn (insn);
6039 return;
6043 /* Otherwise we can implement this operation by emitting a barrier
6044 followed by a store of zero. */
6045 expand_builtin_synchronize ();
6046 emit_move_insn (mem, val);
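/* Illustrative use of the two lock builtins handled above (a sketch,
   not code from this file; assumes an int-sized lock word):

       static int lock;

       static void
       acquire_sketch (void)
       {
         while (__sync_lock_test_and_set (&lock, 1))
           continue;
       }

       static void
       release_sketch (void)
       {
         __sync_lock_release (&lock);
       }

   __sync_lock_test_and_set returns the previous contents of the lock
   word and has acquire semantics; __sync_lock_release stores zero
   with release semantics, which corresponds to the barrier-plus-store
   fallback emitted above.  */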
6049 /* Expand an expression EXP that calls a built-in function,
6050 with result going to TARGET if that's convenient
6051 (and in mode MODE if that's convenient).
6052 SUBTARGET may be used as the target for computing one of EXP's operands.
6053 IGNORE is nonzero if the value is to be ignored. */
6055 rtx
6056 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
6057 int ignore)
6059 tree fndecl = get_callee_fndecl (exp);
6060 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6061 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6063 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6064 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6066 /* When not optimizing, generate calls to library functions for a certain
6067 set of builtins. */
6068 if (!optimize
6069 && !called_as_built_in (fndecl)
6070 && DECL_ASSEMBLER_NAME_SET_P (fndecl)
6071 && fcode != BUILT_IN_ALLOCA)
6072 return expand_call (exp, target, ignore);
6074 /* The built-in function expanders test for target == const0_rtx
6075 to determine whether the function's result will be ignored. */
6076 if (ignore)
6077 target = const0_rtx;
6079 /* If the result of a pure or const built-in function is ignored, and
6080 none of its arguments are volatile, we can avoid expanding the
6081 built-in call and just evaluate the arguments for side-effects. */
6082 if (target == const0_rtx
6083 && (DECL_PURE_P (fndecl) || TREE_READONLY (fndecl)))
6085 bool volatilep = false;
6086 tree arg;
6087 call_expr_arg_iterator iter;
6089 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6090 if (TREE_THIS_VOLATILE (arg))
6092 volatilep = true;
6093 break;
6096 if (! volatilep)
6098 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6099 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6100 return const0_rtx;
6104 switch (fcode)
6106 CASE_FLT_FN (BUILT_IN_FABS):
6107 target = expand_builtin_fabs (exp, target, subtarget);
6108 if (target)
6109 return target;
6110 break;
6112 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6113 target = expand_builtin_copysign (exp, target, subtarget);
6114 if (target)
6115 return target;
6116 break;
6118 /* Just do a normal library call if we were unable to fold
6119 the values. */
6120 CASE_FLT_FN (BUILT_IN_CABS):
6121 break;
6123 CASE_FLT_FN (BUILT_IN_EXP):
6124 CASE_FLT_FN (BUILT_IN_EXP10):
6125 CASE_FLT_FN (BUILT_IN_POW10):
6126 CASE_FLT_FN (BUILT_IN_EXP2):
6127 CASE_FLT_FN (BUILT_IN_EXPM1):
6128 CASE_FLT_FN (BUILT_IN_LOGB):
6129 CASE_FLT_FN (BUILT_IN_LOG):
6130 CASE_FLT_FN (BUILT_IN_LOG10):
6131 CASE_FLT_FN (BUILT_IN_LOG2):
6132 CASE_FLT_FN (BUILT_IN_LOG1P):
6133 CASE_FLT_FN (BUILT_IN_TAN):
6134 CASE_FLT_FN (BUILT_IN_ASIN):
6135 CASE_FLT_FN (BUILT_IN_ACOS):
6136 CASE_FLT_FN (BUILT_IN_ATAN):
6137 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6138 because of possible accuracy problems. */
6139 if (! flag_unsafe_math_optimizations)
6140 break;
6141 CASE_FLT_FN (BUILT_IN_SQRT):
6142 CASE_FLT_FN (BUILT_IN_FLOOR):
6143 CASE_FLT_FN (BUILT_IN_CEIL):
6144 CASE_FLT_FN (BUILT_IN_TRUNC):
6145 CASE_FLT_FN (BUILT_IN_ROUND):
6146 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6147 CASE_FLT_FN (BUILT_IN_RINT):
6148 target = expand_builtin_mathfn (exp, target, subtarget);
6149 if (target)
6150 return target;
6151 break;
6153 CASE_FLT_FN (BUILT_IN_ILOGB):
6154 if (! flag_unsafe_math_optimizations)
6155 break;
6156 CASE_FLT_FN (BUILT_IN_ISINF):
6157 CASE_FLT_FN (BUILT_IN_FINITE):
6158 case BUILT_IN_ISFINITE:
6159 case BUILT_IN_ISNORMAL:
6160 target = expand_builtin_interclass_mathfn (exp, target, subtarget);
6161 if (target)
6162 return target;
6163 break;
6165 CASE_FLT_FN (BUILT_IN_LCEIL):
6166 CASE_FLT_FN (BUILT_IN_LLCEIL):
6167 CASE_FLT_FN (BUILT_IN_LFLOOR):
6168 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6169 target = expand_builtin_int_roundingfn (exp, target, subtarget);
6170 if (target)
6171 return target;
6172 break;
6174 CASE_FLT_FN (BUILT_IN_LRINT):
6175 CASE_FLT_FN (BUILT_IN_LLRINT):
6176 CASE_FLT_FN (BUILT_IN_LROUND):
6177 CASE_FLT_FN (BUILT_IN_LLROUND):
6178 target = expand_builtin_int_roundingfn_2 (exp, target, subtarget);
6179 if (target)
6180 return target;
6181 break;
6183 CASE_FLT_FN (BUILT_IN_POW):
6184 target = expand_builtin_pow (exp, target, subtarget);
6185 if (target)
6186 return target;
6187 break;
6189 CASE_FLT_FN (BUILT_IN_POWI):
6190 target = expand_builtin_powi (exp, target, subtarget);
6191 if (target)
6192 return target;
6193 break;
6195 CASE_FLT_FN (BUILT_IN_ATAN2):
6196 CASE_FLT_FN (BUILT_IN_LDEXP):
6197 CASE_FLT_FN (BUILT_IN_SCALB):
6198 CASE_FLT_FN (BUILT_IN_SCALBN):
6199 CASE_FLT_FN (BUILT_IN_SCALBLN):
6200 if (! flag_unsafe_math_optimizations)
6201 break;
6203 CASE_FLT_FN (BUILT_IN_FMOD):
6204 CASE_FLT_FN (BUILT_IN_REMAINDER):
6205 CASE_FLT_FN (BUILT_IN_DREM):
6206 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6207 if (target)
6208 return target;
6209 break;
6211 CASE_FLT_FN (BUILT_IN_CEXPI):
6212 target = expand_builtin_cexpi (exp, target, subtarget);
6213 gcc_assert (target);
6214 return target;
6216 CASE_FLT_FN (BUILT_IN_SIN):
6217 CASE_FLT_FN (BUILT_IN_COS):
6218 if (! flag_unsafe_math_optimizations)
6219 break;
6220 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6221 if (target)
6222 return target;
6223 break;
6225 CASE_FLT_FN (BUILT_IN_SINCOS):
6226 if (! flag_unsafe_math_optimizations)
6227 break;
6228 target = expand_builtin_sincos (exp);
6229 if (target)
6230 return target;
6231 break;
6233 case BUILT_IN_APPLY_ARGS:
6234 return expand_builtin_apply_args ();
6236 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6237 FUNCTION with a copy of the parameters described by
6238 ARGUMENTS, and ARGSIZE. It returns a block of memory
6239 allocated on the stack into which is stored all the registers
6240 that might possibly be used for returning the result of a
6241 function. ARGUMENTS is the value returned by
6242 __builtin_apply_args. ARGSIZE is the number of bytes of
6243 arguments that must be copied. ??? How should this value be
6244 computed? We'll also need a safe worst case value for varargs
6245 functions. */
6246 case BUILT_IN_APPLY:
6247 if (!validate_arglist (exp, POINTER_TYPE,
6248 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6249 && !validate_arglist (exp, REFERENCE_TYPE,
6250 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6251 return const0_rtx;
6252 else
6254 rtx ops[3];
6256 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6257 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6258 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6260 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6263 /* __builtin_return (RESULT) causes the function to return the
6264 value described by RESULT. RESULT is address of the block of
6265 memory returned by __builtin_apply. */
6266 case BUILT_IN_RETURN:
6267 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6268 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6269 return const0_rtx;
6271 case BUILT_IN_SAVEREGS:
6272 return expand_builtin_saveregs ();
6274 case BUILT_IN_ARGS_INFO:
6275 return expand_builtin_args_info (exp);
6277 case BUILT_IN_VA_ARG_PACK:
6278 /* All valid uses of __builtin_va_arg_pack () are removed during
6279 inlining. */
6280 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6281 return const0_rtx;
6283 case BUILT_IN_VA_ARG_PACK_LEN:
6284 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6285 inlining. */
6286 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6287 return const0_rtx;
6289 /* Return the address of the first anonymous stack arg. */
6290 case BUILT_IN_NEXT_ARG:
6291 if (fold_builtin_next_arg (exp, false))
6292 return const0_rtx;
6293 return expand_builtin_next_arg ();
6295 case BUILT_IN_CLEAR_CACHE:
6296 target = expand_builtin___clear_cache (exp);
6297 if (target)
6298 return target;
6299 break;
6301 case BUILT_IN_CLASSIFY_TYPE:
6302 return expand_builtin_classify_type (exp);
6304 case BUILT_IN_CONSTANT_P:
6305 return const0_rtx;
6307 case BUILT_IN_FRAME_ADDRESS:
6308 case BUILT_IN_RETURN_ADDRESS:
6309 return expand_builtin_frame_address (fndecl, exp);
6311 /* Returns the address of the area where the structure is returned.
6312 0 otherwise. */
6313 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6314 if (call_expr_nargs (exp) != 0
6315 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6316 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6317 return const0_rtx;
6318 else
6319 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6321 case BUILT_IN_ALLOCA:
6322 target = expand_builtin_alloca (exp, target);
6323 if (target)
6324 return target;
6325 break;
6327 case BUILT_IN_STACK_SAVE:
6328 return expand_stack_save ();
6330 case BUILT_IN_STACK_RESTORE:
6331 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6332 return const0_rtx;
6334 case BUILT_IN_BSWAP32:
6335 case BUILT_IN_BSWAP64:
6336 target = expand_builtin_bswap (exp, target, subtarget);
6338 if (target)
6339 return target;
6340 break;
6342 CASE_INT_FN (BUILT_IN_FFS):
6343 case BUILT_IN_FFSIMAX:
6344 target = expand_builtin_unop (target_mode, exp, target,
6345 subtarget, ffs_optab);
6346 if (target)
6347 return target;
6348 break;
6350 CASE_INT_FN (BUILT_IN_CLZ):
6351 case BUILT_IN_CLZIMAX:
6352 target = expand_builtin_unop (target_mode, exp, target,
6353 subtarget, clz_optab);
6354 if (target)
6355 return target;
6356 break;
6358 CASE_INT_FN (BUILT_IN_CTZ):
6359 case BUILT_IN_CTZIMAX:
6360 target = expand_builtin_unop (target_mode, exp, target,
6361 subtarget, ctz_optab);
6362 if (target)
6363 return target;
6364 break;
6366 CASE_INT_FN (BUILT_IN_POPCOUNT):
6367 case BUILT_IN_POPCOUNTIMAX:
6368 target = expand_builtin_unop (target_mode, exp, target,
6369 subtarget, popcount_optab);
6370 if (target)
6371 return target;
6372 break;
6374 CASE_INT_FN (BUILT_IN_PARITY):
6375 case BUILT_IN_PARITYIMAX:
6376 target = expand_builtin_unop (target_mode, exp, target,
6377 subtarget, parity_optab);
6378 if (target)
6379 return target;
6380 break;
6382 case BUILT_IN_STRLEN:
6383 target = expand_builtin_strlen (exp, target, target_mode);
6384 if (target)
6385 return target;
6386 break;
6388 case BUILT_IN_STRCPY:
6389 target = expand_builtin_strcpy (fndecl, exp, target, mode);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_STRNCPY:
6395 target = expand_builtin_strncpy (exp, target, mode);
6396 if (target)
6397 return target;
6398 break;
6400 case BUILT_IN_STPCPY:
6401 target = expand_builtin_stpcpy (exp, target, mode);
6402 if (target)
6403 return target;
6404 break;
6406 case BUILT_IN_STRCAT:
6407 target = expand_builtin_strcat (fndecl, exp, target, mode);
6408 if (target)
6409 return target;
6410 break;
6412 case BUILT_IN_STRNCAT:
6413 target = expand_builtin_strncat (exp, target, mode);
6414 if (target)
6415 return target;
6416 break;
6418 case BUILT_IN_STRSPN:
6419 target = expand_builtin_strspn (exp, target, mode);
6420 if (target)
6421 return target;
6422 break;
6424 case BUILT_IN_STRCSPN:
6425 target = expand_builtin_strcspn (exp, target, mode);
6426 if (target)
6427 return target;
6428 break;
6430 case BUILT_IN_STRSTR:
6431 target = expand_builtin_strstr (exp, target, mode);
6432 if (target)
6433 return target;
6434 break;
6436 case BUILT_IN_STRPBRK:
6437 target = expand_builtin_strpbrk (exp, target, mode);
6438 if (target)
6439 return target;
6440 break;
6442 case BUILT_IN_INDEX:
6443 case BUILT_IN_STRCHR:
6444 target = expand_builtin_strchr (exp, target, mode);
6445 if (target)
6446 return target;
6447 break;
6449 case BUILT_IN_RINDEX:
6450 case BUILT_IN_STRRCHR:
6451 target = expand_builtin_strrchr (exp, target, mode);
6452 if (target)
6453 return target;
6454 break;
6456 case BUILT_IN_MEMCPY:
6457 target = expand_builtin_memcpy (exp, target, mode);
6458 if (target)
6459 return target;
6460 break;
6462 case BUILT_IN_MEMPCPY:
6463 target = expand_builtin_mempcpy (exp, target, mode);
6464 if (target)
6465 return target;
6466 break;
6468 case BUILT_IN_MEMMOVE:
6469 target = expand_builtin_memmove (exp, target, mode, ignore);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_BCOPY:
6475 target = expand_builtin_bcopy (exp, ignore);
6476 if (target)
6477 return target;
6478 break;
6480 case BUILT_IN_MEMSET:
6481 target = expand_builtin_memset (exp, target, mode);
6482 if (target)
6483 return target;
6484 break;
6486 case BUILT_IN_BZERO:
6487 target = expand_builtin_bzero (exp);
6488 if (target)
6489 return target;
6490 break;
6492 case BUILT_IN_STRCMP:
6493 target = expand_builtin_strcmp (exp, target, mode);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_STRNCMP:
6499 target = expand_builtin_strncmp (exp, target, mode);
6500 if (target)
6501 return target;
6502 break;
6504 case BUILT_IN_MEMCHR:
6505 target = expand_builtin_memchr (exp, target, mode);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_BCMP:
6511 case BUILT_IN_MEMCMP:
6512 target = expand_builtin_memcmp (exp, target, mode);
6513 if (target)
6514 return target;
6515 break;
6517 case BUILT_IN_SETJMP:
6518 /* This should have been lowered to the builtins below. */
6519 gcc_unreachable ();
6521 case BUILT_IN_SETJMP_SETUP:
6522 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6523 and the receiver label. */
6524 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6526 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6527 VOIDmode, EXPAND_NORMAL);
6528 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6529 rtx label_r = label_rtx (label);
6531 /* This is copied from the handling of non-local gotos. */
6532 expand_builtin_setjmp_setup (buf_addr, label_r);
6533 nonlocal_goto_handler_labels
6534 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6535 nonlocal_goto_handler_labels);
6536 /* ??? Do not let expand_label treat us as such since we would
6537 not want to be both on the list of non-local labels and on
6538 the list of forced labels. */
6539 FORCED_LABEL (label) = 0;
6540 return const0_rtx;
6542 break;
6544 case BUILT_IN_SETJMP_DISPATCHER:
6545 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6546 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6548 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6549 rtx label_r = label_rtx (label);
6551 /* Remove the dispatcher label from the list of non-local labels
6552 since the receiver labels have been added to it above. */
6553 remove_node_from_expr_list (label_r, &nonlocal_goto_handler_labels);
6554 return const0_rtx;
6556 break;
6558 case BUILT_IN_SETJMP_RECEIVER:
6559 /* __builtin_setjmp_receiver is passed the receiver label. */
6560 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6562 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6563 rtx label_r = label_rtx (label);
6565 expand_builtin_setjmp_receiver (label_r);
6566 return const0_rtx;
6568 break;
6570 /* __builtin_longjmp is passed a pointer to an array of five words.
6571 It's similar to the C library longjmp function but works with
6572 __builtin_setjmp above. */
6573 case BUILT_IN_LONGJMP:
6574 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6576 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6577 VOIDmode, EXPAND_NORMAL);
6578 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6580 if (value != const1_rtx)
6582 error ("%<__builtin_longjmp%> second argument must be 1");
6583 return const0_rtx;
6586 expand_builtin_longjmp (buf_addr, value);
6587 return const0_rtx;
6589 break;
6591 case BUILT_IN_NONLOCAL_GOTO:
6592 target = expand_builtin_nonlocal_goto (exp);
6593 if (target)
6594 return target;
6595 break;
6597 /* This updates the setjmp buffer that is its argument with the value
6598 of the current stack pointer. */
6599 case BUILT_IN_UPDATE_SETJMP_BUF:
6600 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6602 rtx buf_addr
6603 = expand_normal (CALL_EXPR_ARG (exp, 0));
6605 expand_builtin_update_setjmp_buf (buf_addr);
6606 return const0_rtx;
6608 break;
6610 case BUILT_IN_TRAP:
6611 expand_builtin_trap ();
6612 return const0_rtx;
6614 case BUILT_IN_PRINTF:
6615 target = expand_builtin_printf (exp, target, mode, false);
6616 if (target)
6617 return target;
6618 break;
6620 case BUILT_IN_PRINTF_UNLOCKED:
6621 target = expand_builtin_printf (exp, target, mode, true);
6622 if (target)
6623 return target;
6624 break;
6626 case BUILT_IN_FPUTS:
6627 target = expand_builtin_fputs (exp, target, false);
6628 if (target)
6629 return target;
6630 break;
6631 case BUILT_IN_FPUTS_UNLOCKED:
6632 target = expand_builtin_fputs (exp, target, true);
6633 if (target)
6634 return target;
6635 break;
6637 case BUILT_IN_FPRINTF:
6638 target = expand_builtin_fprintf (exp, target, mode, false);
6639 if (target)
6640 return target;
6641 break;
6643 case BUILT_IN_FPRINTF_UNLOCKED:
6644 target = expand_builtin_fprintf (exp, target, mode, true);
6645 if (target)
6646 return target;
6647 break;
6649 case BUILT_IN_SPRINTF:
6650 target = expand_builtin_sprintf (exp, target, mode);
6651 if (target)
6652 return target;
6653 break;
6655 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6656 case BUILT_IN_SIGNBITD32:
6657 case BUILT_IN_SIGNBITD64:
6658 case BUILT_IN_SIGNBITD128:
6659 target = expand_builtin_signbit (exp, target);
6660 if (target)
6661 return target;
6662 break;
6664 /* Various hooks for the DWARF 2 __throw routine. */
6665 case BUILT_IN_UNWIND_INIT:
6666 expand_builtin_unwind_init ();
6667 return const0_rtx;
6668 case BUILT_IN_DWARF_CFA:
6669 return virtual_cfa_rtx;
6670 #ifdef DWARF2_UNWIND_INFO
6671 case BUILT_IN_DWARF_SP_COLUMN:
6672 return expand_builtin_dwarf_sp_column ();
6673 case BUILT_IN_INIT_DWARF_REG_SIZES:
6674 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6675 return const0_rtx;
6676 #endif
6677 case BUILT_IN_FROB_RETURN_ADDR:
6678 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6679 case BUILT_IN_EXTRACT_RETURN_ADDR:
6680 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6681 case BUILT_IN_EH_RETURN:
6682 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6683 CALL_EXPR_ARG (exp, 1));
6684 return const0_rtx;
6685 #ifdef EH_RETURN_DATA_REGNO
6686 case BUILT_IN_EH_RETURN_DATA_REGNO:
6687 return expand_builtin_eh_return_data_regno (exp);
6688 #endif
6689 case BUILT_IN_EXTEND_POINTER:
6690 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6692 case BUILT_IN_VA_START:
6693 return expand_builtin_va_start (exp);
6694 case BUILT_IN_VA_END:
6695 return expand_builtin_va_end (exp);
6696 case BUILT_IN_VA_COPY:
6697 return expand_builtin_va_copy (exp);
6698 case BUILT_IN_EXPECT:
6699 return expand_builtin_expect (exp, target);
6700 case BUILT_IN_PREFETCH:
6701 expand_builtin_prefetch (exp);
6702 return const0_rtx;
6704 case BUILT_IN_PROFILE_FUNC_ENTER:
6705 return expand_builtin_profile_func (false);
6706 case BUILT_IN_PROFILE_FUNC_EXIT:
6707 return expand_builtin_profile_func (true);
6709 case BUILT_IN_INIT_TRAMPOLINE:
6710 return expand_builtin_init_trampoline (exp);
6711 case BUILT_IN_ADJUST_TRAMPOLINE:
6712 return expand_builtin_adjust_trampoline (exp);
6714 case BUILT_IN_FORK:
6715 case BUILT_IN_EXECL:
6716 case BUILT_IN_EXECV:
6717 case BUILT_IN_EXECLP:
6718 case BUILT_IN_EXECLE:
6719 case BUILT_IN_EXECVP:
6720 case BUILT_IN_EXECVE:
6721 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6722 if (target)
6723 return target;
6724 break;
6726 case BUILT_IN_FETCH_AND_ADD_1:
6727 case BUILT_IN_FETCH_AND_ADD_2:
6728 case BUILT_IN_FETCH_AND_ADD_4:
6729 case BUILT_IN_FETCH_AND_ADD_8:
6730 case BUILT_IN_FETCH_AND_ADD_16:
6731 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_ADD_1);
6732 target = expand_builtin_sync_operation (mode, exp, PLUS,
6733 false, target, ignore);
6734 if (target)
6735 return target;
6736 break;
6738 case BUILT_IN_FETCH_AND_SUB_1:
6739 case BUILT_IN_FETCH_AND_SUB_2:
6740 case BUILT_IN_FETCH_AND_SUB_4:
6741 case BUILT_IN_FETCH_AND_SUB_8:
6742 case BUILT_IN_FETCH_AND_SUB_16:
6743 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_SUB_1);
6744 target = expand_builtin_sync_operation (mode, exp, MINUS,
6745 false, target, ignore);
6746 if (target)
6747 return target;
6748 break;
6750 case BUILT_IN_FETCH_AND_OR_1:
6751 case BUILT_IN_FETCH_AND_OR_2:
6752 case BUILT_IN_FETCH_AND_OR_4:
6753 case BUILT_IN_FETCH_AND_OR_8:
6754 case BUILT_IN_FETCH_AND_OR_16:
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_OR_1);
6756 target = expand_builtin_sync_operation (mode, exp, IOR,
6757 false, target, ignore);
6758 if (target)
6759 return target;
6760 break;
6762 case BUILT_IN_FETCH_AND_AND_1:
6763 case BUILT_IN_FETCH_AND_AND_2:
6764 case BUILT_IN_FETCH_AND_AND_4:
6765 case BUILT_IN_FETCH_AND_AND_8:
6766 case BUILT_IN_FETCH_AND_AND_16:
6767 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_AND_1);
6768 target = expand_builtin_sync_operation (mode, exp, AND,
6769 false, target, ignore);
6770 if (target)
6771 return target;
6772 break;
6774 case BUILT_IN_FETCH_AND_XOR_1:
6775 case BUILT_IN_FETCH_AND_XOR_2:
6776 case BUILT_IN_FETCH_AND_XOR_4:
6777 case BUILT_IN_FETCH_AND_XOR_8:
6778 case BUILT_IN_FETCH_AND_XOR_16:
6779 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_XOR_1);
6780 target = expand_builtin_sync_operation (mode, exp, XOR,
6781 false, target, ignore);
6782 if (target)
6783 return target;
6784 break;
6786 case BUILT_IN_FETCH_AND_NAND_1:
6787 case BUILT_IN_FETCH_AND_NAND_2:
6788 case BUILT_IN_FETCH_AND_NAND_4:
6789 case BUILT_IN_FETCH_AND_NAND_8:
6790 case BUILT_IN_FETCH_AND_NAND_16:
6791 mode = get_builtin_sync_mode (fcode - BUILT_IN_FETCH_AND_NAND_1);
6792 target = expand_builtin_sync_operation (mode, exp, NOT,
6793 false, target, ignore);
6794 if (target)
6795 return target;
6796 break;
6798 case BUILT_IN_ADD_AND_FETCH_1:
6799 case BUILT_IN_ADD_AND_FETCH_2:
6800 case BUILT_IN_ADD_AND_FETCH_4:
6801 case BUILT_IN_ADD_AND_FETCH_8:
6802 case BUILT_IN_ADD_AND_FETCH_16:
6803 mode = get_builtin_sync_mode (fcode - BUILT_IN_ADD_AND_FETCH_1);
6804 target = expand_builtin_sync_operation (mode, exp, PLUS,
6805 true, target, ignore);
6806 if (target)
6807 return target;
6808 break;
6810 case BUILT_IN_SUB_AND_FETCH_1:
6811 case BUILT_IN_SUB_AND_FETCH_2:
6812 case BUILT_IN_SUB_AND_FETCH_4:
6813 case BUILT_IN_SUB_AND_FETCH_8:
6814 case BUILT_IN_SUB_AND_FETCH_16:
6815 mode = get_builtin_sync_mode (fcode - BUILT_IN_SUB_AND_FETCH_1);
6816 target = expand_builtin_sync_operation (mode, exp, MINUS,
6817 true, target, ignore);
6818 if (target)
6819 return target;
6820 break;
6822 case BUILT_IN_OR_AND_FETCH_1:
6823 case BUILT_IN_OR_AND_FETCH_2:
6824 case BUILT_IN_OR_AND_FETCH_4:
6825 case BUILT_IN_OR_AND_FETCH_8:
6826 case BUILT_IN_OR_AND_FETCH_16:
6827 mode = get_builtin_sync_mode (fcode - BUILT_IN_OR_AND_FETCH_1);
6828 target = expand_builtin_sync_operation (mode, exp, IOR,
6829 true, target, ignore);
6830 if (target)
6831 return target;
6832 break;
6834 case BUILT_IN_AND_AND_FETCH_1:
6835 case BUILT_IN_AND_AND_FETCH_2:
6836 case BUILT_IN_AND_AND_FETCH_4:
6837 case BUILT_IN_AND_AND_FETCH_8:
6838 case BUILT_IN_AND_AND_FETCH_16:
6839 mode = get_builtin_sync_mode (fcode - BUILT_IN_AND_AND_FETCH_1);
6840 target = expand_builtin_sync_operation (mode, exp, AND,
6841 true, target, ignore);
6842 if (target)
6843 return target;
6844 break;
6846 case BUILT_IN_XOR_AND_FETCH_1:
6847 case BUILT_IN_XOR_AND_FETCH_2:
6848 case BUILT_IN_XOR_AND_FETCH_4:
6849 case BUILT_IN_XOR_AND_FETCH_8:
6850 case BUILT_IN_XOR_AND_FETCH_16:
6851 mode = get_builtin_sync_mode (fcode - BUILT_IN_XOR_AND_FETCH_1);
6852 target = expand_builtin_sync_operation (mode, exp, XOR,
6853 true, target, ignore);
6854 if (target)
6855 return target;
6856 break;
6858 case BUILT_IN_NAND_AND_FETCH_1:
6859 case BUILT_IN_NAND_AND_FETCH_2:
6860 case BUILT_IN_NAND_AND_FETCH_4:
6861 case BUILT_IN_NAND_AND_FETCH_8:
6862 case BUILT_IN_NAND_AND_FETCH_16:
6863 mode = get_builtin_sync_mode (fcode - BUILT_IN_NAND_AND_FETCH_1);
6864 target = expand_builtin_sync_operation (mode, exp, NOT,
6865 true, target, ignore);
6866 if (target)
6867 return target;
6868 break;
6870 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1:
6871 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
6872 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4:
6873 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
6874 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16:
6875 if (mode == VOIDmode)
6876 mode = TYPE_MODE (boolean_type_node);
6877 if (!target || !register_operand (target, mode))
6878 target = gen_reg_rtx (mode);
6880 mode = get_builtin_sync_mode (fcode - BUILT_IN_BOOL_COMPARE_AND_SWAP_1);
6881 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6882 if (target)
6883 return target;
6884 break;
6886 case BUILT_IN_VAL_COMPARE_AND_SWAP_1:
6887 case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
6888 case BUILT_IN_VAL_COMPARE_AND_SWAP_4:
6889 case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
6890 case BUILT_IN_VAL_COMPARE_AND_SWAP_16:
6891 mode = get_builtin_sync_mode (fcode - BUILT_IN_VAL_COMPARE_AND_SWAP_1);
6892 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6893 if (target)
6894 return target;
6895 break;
6897 case BUILT_IN_LOCK_TEST_AND_SET_1:
6898 case BUILT_IN_LOCK_TEST_AND_SET_2:
6899 case BUILT_IN_LOCK_TEST_AND_SET_4:
6900 case BUILT_IN_LOCK_TEST_AND_SET_8:
6901 case BUILT_IN_LOCK_TEST_AND_SET_16:
6902 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_TEST_AND_SET_1);
6903 target = expand_builtin_lock_test_and_set (mode, exp, target);
6904 if (target)
6905 return target;
6906 break;
6908 case BUILT_IN_LOCK_RELEASE_1:
6909 case BUILT_IN_LOCK_RELEASE_2:
6910 case BUILT_IN_LOCK_RELEASE_4:
6911 case BUILT_IN_LOCK_RELEASE_8:
6912 case BUILT_IN_LOCK_RELEASE_16:
6913 mode = get_builtin_sync_mode (fcode - BUILT_IN_LOCK_RELEASE_1);
6914 expand_builtin_lock_release (mode, exp);
6915 return const0_rtx;
6917 case BUILT_IN_SYNCHRONIZE:
6918 expand_builtin_synchronize ();
6919 return const0_rtx;
6921 case BUILT_IN_OBJECT_SIZE:
6922 return expand_builtin_object_size (exp);
6924 case BUILT_IN_MEMCPY_CHK:
6925 case BUILT_IN_MEMPCPY_CHK:
6926 case BUILT_IN_MEMMOVE_CHK:
6927 case BUILT_IN_MEMSET_CHK:
6928 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6929 if (target)
6930 return target;
6931 break;
6933 case BUILT_IN_STRCPY_CHK:
6934 case BUILT_IN_STPCPY_CHK:
6935 case BUILT_IN_STRNCPY_CHK:
6936 case BUILT_IN_STRCAT_CHK:
6937 case BUILT_IN_STRNCAT_CHK:
6938 case BUILT_IN_SNPRINTF_CHK:
6939 case BUILT_IN_VSNPRINTF_CHK:
6940 maybe_emit_chk_warning (exp, fcode);
6941 break;
6943 case BUILT_IN_SPRINTF_CHK:
6944 case BUILT_IN_VSPRINTF_CHK:
6945 maybe_emit_sprintf_chk_warning (exp, fcode);
6946 break;
6948 default: /* just do library call, if unknown builtin */
6949 break;
6952 /* The switch statement above can drop through to cause the function
6953 to be called normally. */
6954 return expand_call (exp, target, ignore);
6957 /* Determine whether a tree node represents a call to a built-in
6958 function. If the tree T is a call to a built-in function with
6959 the right number of arguments of the appropriate types, return
6960 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6961 Otherwise the return value is END_BUILTINS. */
6963 enum built_in_function
6964 builtin_mathfn_code (const_tree t)
6966 const_tree fndecl, arg, parmlist;
6967 const_tree argtype, parmtype;
6968 const_call_expr_arg_iterator iter;
6970 if (TREE_CODE (t) != CALL_EXPR
6971 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6972 return END_BUILTINS;
6974 fndecl = get_callee_fndecl (t);
6975 if (fndecl == NULL_TREE
6976 || TREE_CODE (fndecl) != FUNCTION_DECL
6977 || ! DECL_BUILT_IN (fndecl)
6978 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6979 return END_BUILTINS;
6981 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6982 init_const_call_expr_arg_iterator (t, &iter);
6983 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6985 /* If a function doesn't take a variable number of arguments,
6986 the last element in the list will have type `void'. */
6987 parmtype = TREE_VALUE (parmlist);
6988 if (VOID_TYPE_P (parmtype))
6990 if (more_const_call_expr_args_p (&iter))
6991 return END_BUILTINS;
6992 return DECL_FUNCTION_CODE (fndecl);
6995 if (! more_const_call_expr_args_p (&iter))
6996 return END_BUILTINS;
6998 arg = next_const_call_expr_arg (&iter);
6999 argtype = TREE_TYPE (arg);
7001 if (SCALAR_FLOAT_TYPE_P (parmtype))
7003 if (! SCALAR_FLOAT_TYPE_P (argtype))
7004 return END_BUILTINS;
7006 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7008 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7009 return END_BUILTINS;
7011 else if (POINTER_TYPE_P (parmtype))
7013 if (! POINTER_TYPE_P (argtype))
7014 return END_BUILTINS;
7016 else if (INTEGRAL_TYPE_P (parmtype))
7018 if (! INTEGRAL_TYPE_P (argtype))
7019 return END_BUILTINS;
7021 else
7022 return END_BUILTINS;
7025 /* Variable-length argument list. */
7026 return DECL_FUNCTION_CODE (fndecl);
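/* Illustrative behaviour of the type check above (a sketch): for a
   well-formed call such as sqrt (2.0) the parameter list is
   (double, void), the single argument has scalar float type, and
   BUILT_IN_SQRT is returned; if any argument fails to match the class
   of its parameter (float, complex float, pointer or integral),
   END_BUILTINS is returned instead.  */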
7029 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7030 evaluate to a constant. */
7032 static tree
7033 fold_builtin_constant_p (tree arg)
7035 /* We return 1 for a numeric type that's known to be a constant
7036 value at compile-time or for an aggregate type that's a
7037 literal constant. */
7038 STRIP_NOPS (arg);
7040 /* If we know this is a constant, return the constant 1. */
7041 if (CONSTANT_CLASS_P (arg)
7042 || (TREE_CODE (arg) == CONSTRUCTOR
7043 && TREE_CONSTANT (arg)))
7044 return integer_one_node;
7045 if (TREE_CODE (arg) == ADDR_EXPR)
7047 tree op = TREE_OPERAND (arg, 0);
7048 if (TREE_CODE (op) == STRING_CST
7049 || (TREE_CODE (op) == ARRAY_REF
7050 && integer_zerop (TREE_OPERAND (op, 1))
7051 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7052 return integer_one_node;
7055 /* If this expression has side effects, show we don't know it to be a
7056 constant. Likewise if it's a pointer or aggregate type since in
7057 those cases we only want literals, since those are only optimized
7058 when generating RTL, not later.
7059 And finally, if we are compiling an initializer, not code, we
7060 need to return a definite result now; there's not going to be any
7061 more optimization done. */
7062 if (TREE_SIDE_EFFECTS (arg)
7063 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7064 || POINTER_TYPE_P (TREE_TYPE (arg))
7065 || cfun == 0
7066 || folding_initializer)
7067 return integer_zero_node;
7069 return NULL_TREE;
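/* Illustrative results of the folding above (a sketch):

       __builtin_constant_p (42)       folds to 1
       __builtin_constant_p ("abc")    folds to 1
       __builtin_constant_p (x++)      folds to 0 (side effects)
       __builtin_constant_p (p)        folds to 0 for a pointer-typed,
                                       non-literal argument
       __builtin_constant_p (n)        is deferred (NULL_TREE) so that
                                       later optimizations may still
                                       prove N constant.  */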
7072 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7073 return it as a truthvalue. */
7075 static tree
7076 build_builtin_expect_predicate (tree pred, tree expected)
7078 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7080 fn = built_in_decls[BUILT_IN_EXPECT];
7081 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7082 ret_type = TREE_TYPE (TREE_TYPE (fn));
7083 pred_type = TREE_VALUE (arg_types);
7084 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7086 pred = fold_convert (pred_type, pred);
7087 expected = fold_convert (expected_type, expected);
7088 call_expr = build_call_expr (fn, 2, pred, expected);
7090 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7091 build_int_cst (ret_type, 0));
7094 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7095 NULL_TREE if no simplification is possible. */
7097 static tree
7098 fold_builtin_expect (tree arg0, tree arg1)
7100 tree inner, fndecl;
7101 enum tree_code code;
7103 /* If this is a builtin_expect within a builtin_expect keep the
7104 inner one. See through a comparison against a constant. It
7105 might have been added to create a truthvalue.
7106 inner = arg0;
7107 if (COMPARISON_CLASS_P (inner)
7108 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7109 inner = TREE_OPERAND (inner, 0);
7111 if (TREE_CODE (inner) == CALL_EXPR
7112 && (fndecl = get_callee_fndecl (inner))
7113 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7114 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7115 return arg0;
7117 /* Distribute the expected value over short-circuiting operators.
7118 See through the cast from truthvalue_type_node to long. */
7119 inner = arg0;
7120 while (TREE_CODE (inner) == NOP_EXPR
7121 && INTEGRAL_TYPE_P (TREE_TYPE (inner))
7122 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner, 0))))
7123 inner = TREE_OPERAND (inner, 0);
7125 code = TREE_CODE (inner);
7126 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7128 tree op0 = TREE_OPERAND (inner, 0);
7129 tree op1 = TREE_OPERAND (inner, 1);
7131 op0 = build_builtin_expect_predicate (op0, arg1);
7132 op1 = build_builtin_expect_predicate (op1, arg1);
7133 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7135 return fold_convert (TREE_TYPE (arg0), inner);
7138 /* If the argument isn't invariant then there's nothing else we can do. */
7139 if (!TREE_CONSTANT (arg0))
7140 return NULL_TREE;
7142 /* If we expect that a comparison against the argument will fold to
7143 a constant return the constant. In practice, this means a true
7144 constant or the address of a non-weak symbol. */
7145 inner = arg0;
7146 STRIP_NOPS (inner);
7147 if (TREE_CODE (inner) == ADDR_EXPR)
7151 inner = TREE_OPERAND (inner, 0);
7153 while (TREE_CODE (inner) == COMPONENT_REF
7154 || TREE_CODE (inner) == ARRAY_REF);
7155 if (DECL_P (inner) && DECL_WEAK (inner))
7156 return NULL_TREE;
7159 /* Otherwise, ARG0 already has the proper type for the return value. */
7160 return arg0;
7163 /* Fold a call to __builtin_classify_type with argument ARG. */
7165 static tree
7166 fold_builtin_classify_type (tree arg)
7168 if (arg == 0)
7169 return build_int_cst (NULL_TREE, no_type_class);
7171 return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
7174 /* Fold a call to __builtin_strlen with argument ARG. */
7176 static tree
7177 fold_builtin_strlen (tree arg)
7179 if (!validate_arg (arg, POINTER_TYPE))
7180 return NULL_TREE;
7181 else
7183 tree len = c_strlen (arg, 0);
7185 if (len)
7187 /* Convert from the internal "sizetype" type to "size_t". */
7188 if (size_type_node)
7189 len = fold_convert (size_type_node, len);
7190 return len;
7193 return NULL_TREE;
7197 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7199 static tree
7200 fold_builtin_inf (tree type, int warn)
7202 REAL_VALUE_TYPE real;
7204 /* __builtin_inff is intended to be usable to define INFINITY on all
7205 targets. If an infinity is not available, INFINITY expands "to a
7206 positive constant of type float that overflows at translation
7207 time", footnote "In this case, using INFINITY will violate the
7208 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7209 Thus we pedwarn to ensure this constraint violation is
7210 diagnosed. */
7211 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7212 pedwarn ("target format does not support infinity");
7214 real_inf (&real);
7215 return build_real (type, real);
7218 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7220 static tree
7221 fold_builtin_nan (tree arg, tree type, int quiet)
7223 REAL_VALUE_TYPE real;
7224 const char *str;
7226 if (!validate_arg (arg, POINTER_TYPE))
7227 return NULL_TREE;
7228 str = c_getstr (arg);
7229 if (!str)
7230 return NULL_TREE;
7232 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7233 return NULL_TREE;
7235 return build_real (type, real);
7238 /* Return true if the floating point expression T has an integer value.
7239 We also allow +Inf, -Inf and NaN to be considered integer values. */
7241 static bool
7242 integer_valued_real_p (tree t)
7244 switch (TREE_CODE (t))
7246 case FLOAT_EXPR:
7247 return true;
7249 case ABS_EXPR:
7250 case SAVE_EXPR:
7251 return integer_valued_real_p (TREE_OPERAND (t, 0));
7253 case COMPOUND_EXPR:
7254 case MODIFY_EXPR:
7255 case BIND_EXPR:
7256 return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
7258 case PLUS_EXPR:
7259 case MINUS_EXPR:
7260 case MULT_EXPR:
7261 case MIN_EXPR:
7262 case MAX_EXPR:
7263 return integer_valued_real_p (TREE_OPERAND (t, 0))
7264 && integer_valued_real_p (TREE_OPERAND (t, 1));
7266 case COND_EXPR:
7267 return integer_valued_real_p (TREE_OPERAND (t, 1))
7268 && integer_valued_real_p (TREE_OPERAND (t, 2));
7270 case REAL_CST:
7271 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7273 case NOP_EXPR:
7275 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7276 if (TREE_CODE (type) == INTEGER_TYPE)
7277 return true;
7278 if (TREE_CODE (type) == REAL_TYPE)
7279 return integer_valued_real_p (TREE_OPERAND (t, 0));
7280 break;
7283 case CALL_EXPR:
7284 switch (builtin_mathfn_code (t))
7286 CASE_FLT_FN (BUILT_IN_CEIL):
7287 CASE_FLT_FN (BUILT_IN_FLOOR):
7288 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7289 CASE_FLT_FN (BUILT_IN_RINT):
7290 CASE_FLT_FN (BUILT_IN_ROUND):
7291 CASE_FLT_FN (BUILT_IN_TRUNC):
7292 return true;
7294 CASE_FLT_FN (BUILT_IN_FMIN):
7295 CASE_FLT_FN (BUILT_IN_FMAX):
7296 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7297 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7299 default:
7300 break;
7302 break;
7304 default:
7305 break;
7307 return false;
7310 /* FNDECL is assumed to be a builtin where truncation can be propagated
7311 across (for instance floor((double)f) == (double)floorf (f)).
7312 Do the transformation for a call with argument ARG. */
7314 static tree
7315 fold_trunc_transparent_mathfn (tree fndecl, tree arg)
7317 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7319 if (!validate_arg (arg, REAL_TYPE))
7320 return NULL_TREE;
7322 /* Integer rounding functions are idempotent. */
7323 if (fcode == builtin_mathfn_code (arg))
7324 return arg;
7326 /* If argument is already integer valued, and we don't need to worry
7327 about setting errno, there's no need to perform rounding. */
7328 if (! flag_errno_math && integer_valued_real_p (arg))
7329 return arg;
7331 if (optimize)
7333 tree arg0 = strip_float_extensions (arg);
7334 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7335 tree newtype = TREE_TYPE (arg0);
7336 tree decl;
7338 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7339 && (decl = mathfn_built_in (newtype, fcode)))
7340 return fold_convert (ftype,
7341 build_call_expr (decl, 1,
7342 fold_convert (newtype, arg0)));
7344 return NULL_TREE;
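/* Illustrative sketch (not from builtins.c): the narrowing performed above,
   assuming float is narrower than double.  */
#include <math.h>

double
narrowed_floor (float f)
{
  /* With optimization, folded roughly to (double) floorf (f); and
     floor (floor (x)) simply folds to floor (x) by idempotence.  */
  return floor ((double) f);
}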
7347 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7348 the argument, for instance lround((double)f) -> lroundf (f).
7349 Do the transformation for a call with argument ARG. */
7351 static tree
7352 fold_fixed_mathfn (tree fndecl, tree arg)
7354 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7356 if (!validate_arg (arg, REAL_TYPE))
7357 return NULL_TREE;
7359 /* If argument is already integer valued, and we don't need to worry
7360 about setting errno, there's no need to perform rounding. */
7361 if (! flag_errno_math && integer_valued_real_p (arg))
7362 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg);
7364 if (optimize)
7366 tree ftype = TREE_TYPE (arg);
7367 tree arg0 = strip_float_extensions (arg);
7368 tree newtype = TREE_TYPE (arg0);
7369 tree decl;
7371 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7372 && (decl = mathfn_built_in (newtype, fcode)))
7373 return build_call_expr (decl, 1, fold_convert (newtype, arg0));
7376 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7377 sizeof (long long) == sizeof (long). */
7378 if (TYPE_PRECISION (long_long_integer_type_node)
7379 == TYPE_PRECISION (long_integer_type_node))
7381 tree newfn = NULL_TREE;
7382 switch (fcode)
7384 CASE_FLT_FN (BUILT_IN_LLCEIL):
7385 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7386 break;
7388 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7389 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7390 break;
7392 CASE_FLT_FN (BUILT_IN_LLROUND):
7393 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7394 break;
7396 CASE_FLT_FN (BUILT_IN_LLRINT):
7397 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7398 break;
7400 default:
7401 break;
7404 if (newfn)
7406 tree newcall = build_call_expr (newfn, 1, arg);
7407 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7411 return NULL_TREE;
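/* Illustrative sketch (not from builtins.c): the LP64 canonicalization
   above, assuming long and long long have the same precision.  */
#include <math.h>

long long
canonical_llround (double x)
{
  /* Folded roughly to (long long) lround (x) on such targets; likewise
     llceil/llfloor/llrint map to their `l' counterparts.  */
  return llround (x);
}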
7414 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7415 return type. Return NULL_TREE if no simplification can be made. */
7417 static tree
7418 fold_builtin_cabs (tree arg, tree type, tree fndecl)
7420 tree res;
7422 if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
7423 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7424 return NULL_TREE;
7426 /* Calculate the result when the argument is a constant. */
7427 if (TREE_CODE (arg) == COMPLEX_CST
7428 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7429 type, mpfr_hypot)))
7430 return res;
7432 if (TREE_CODE (arg) == COMPLEX_EXPR)
7434 tree real = TREE_OPERAND (arg, 0);
7435 tree imag = TREE_OPERAND (arg, 1);
7437 /* If either part is zero, cabs is fabs of the other. */
7438 if (real_zerop (real))
7439 return fold_build1 (ABS_EXPR, type, imag);
7440 if (real_zerop (imag))
7441 return fold_build1 (ABS_EXPR, type, real);
7443 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7444 if (flag_unsafe_math_optimizations
7445 && operand_equal_p (real, imag, OEP_PURE_SAME))
7447 const REAL_VALUE_TYPE sqrt2_trunc
7448 = real_value_truncate (TYPE_MODE (type),
7449 *get_real_const (rv_sqrt2));
7450 STRIP_NOPS (real);
7451 return fold_build2 (MULT_EXPR, type,
7452 fold_build1 (ABS_EXPR, type, real),
7453 build_real (type, sqrt2_trunc));
7457 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7458 if (TREE_CODE (arg) == NEGATE_EXPR
7459 || TREE_CODE (arg) == CONJ_EXPR)
7460 return build_call_expr (fndecl, 1, TREE_OPERAND (arg, 0));
7462 /* Don't do this when optimizing for size. */
7463 if (flag_unsafe_math_optimizations
7464 && optimize && !optimize_size)
7466 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7468 if (sqrtfn != NULL_TREE)
7470 tree rpart, ipart, result;
7472 arg = builtin_save_expr (arg);
7474 rpart = fold_build1 (REALPART_EXPR, type, arg);
7475 ipart = fold_build1 (IMAGPART_EXPR, type, arg);
7477 rpart = builtin_save_expr (rpart);
7478 ipart = builtin_save_expr (ipart);
7480 result = fold_build2 (PLUS_EXPR, type,
7481 fold_build2 (MULT_EXPR, type,
7482 rpart, rpart),
7483 fold_build2 (MULT_EXPR, type,
7484 ipart, ipart));
7486 return build_call_expr (sqrtfn, 1, result);
7490 return NULL_TREE;
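/* Illustrative sketch (not from builtins.c): the last transformation above,
   written out by hand.  With -funsafe-math-optimizations (and not optimizing
   for size) cabs (z) is expanded to a sqrt of the sum of squares.  */
#include <complex.h>
#include <math.h>

double
cabs_expanded (double _Complex z)
{
  double r = creal (z), i = cimag (z);
  return sqrt (r * r + i * i);
}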
7493 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7494 Return NULL_TREE if no simplification can be made. */
7496 static tree
7497 fold_builtin_sqrt (tree arg, tree type)
7500 enum built_in_function fcode;
7501 tree res;
7503 if (!validate_arg (arg, REAL_TYPE))
7504 return NULL_TREE;
7506 /* Calculate the result when the argument is a constant. */
7507 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7508 return res;
7510 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7511 fcode = builtin_mathfn_code (arg);
7512 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7514 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7515 arg = fold_build2 (MULT_EXPR, type,
7516 CALL_EXPR_ARG (arg, 0),
7517 build_real (type, dconsthalf));
7518 return build_call_expr (expfn, 1, arg);
7521 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7522 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7524 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7526 if (powfn)
7528 tree arg0 = CALL_EXPR_ARG (arg, 0);
7529 tree tree_root;
7530 /* The inner root was either sqrt or cbrt. */
7531 REAL_VALUE_TYPE dconstroot =
7532 BUILTIN_SQRT_P (fcode) ? dconsthalf : *get_real_const (rv_third);
7534 /* Adjust for the outer root. */
7535 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7536 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7537 tree_root = build_real (type, dconstroot);
7538 return build_call_expr (powfn, 2, arg0, tree_root);
7542 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7543 if (flag_unsafe_math_optimizations
7544 && (fcode == BUILT_IN_POW
7545 || fcode == BUILT_IN_POWF
7546 || fcode == BUILT_IN_POWL))
7548 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7549 tree arg0 = CALL_EXPR_ARG (arg, 0);
7550 tree arg1 = CALL_EXPR_ARG (arg, 1);
7551 tree narg1;
7552 if (!tree_expr_nonnegative_p (arg0))
7553 arg0 = build1 (ABS_EXPR, type, arg0);
7554 narg1 = fold_build2 (MULT_EXPR, type, arg1,
7555 build_real (type, dconsthalf));
7556 return build_call_expr (powfn, 2, arg0, narg1);
7559 return NULL_TREE;
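/* Illustrative sketch (not from builtins.c): the pow/exp rewrites above at
   the source level, valid only under -funsafe-math-optimizations.  */
#include <math.h>

double
sqrt_of_pow (double x, double y)
{
  /* Folded roughly to pow (fabs (x), y * 0.5); the fabs is dropped when
     x is known to be nonnegative.  Similarly sqrt (exp (x)) becomes
     exp (x * 0.5).  */
  return sqrt (pow (x, y));
}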
7562 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7563 Return NULL_TREE if no simplification can be made. */
7565 static tree
7566 fold_builtin_cbrt (tree arg, tree type)
7568 const enum built_in_function fcode = builtin_mathfn_code (arg);
7569 tree res;
7571 if (!validate_arg (arg, REAL_TYPE))
7572 return NULL_TREE;
7574 /* Calculate the result when the argument is a constant. */
7575 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7576 return res;
7578 if (flag_unsafe_math_optimizations)
7580 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7581 if (BUILTIN_EXPONENT_P (fcode))
7583 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7584 const REAL_VALUE_TYPE third_trunc =
7585 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_third));
7586 arg = fold_build2 (MULT_EXPR, type,
7587 CALL_EXPR_ARG (arg, 0),
7588 build_real (type, third_trunc));
7589 return build_call_expr (expfn, 1, arg);
7592 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7593 if (BUILTIN_SQRT_P (fcode))
7595 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7597 if (powfn)
7599 tree arg0 = CALL_EXPR_ARG (arg, 0);
7600 tree tree_root;
7601 REAL_VALUE_TYPE dconstroot = *get_real_const (rv_third);
7603 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7604 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7605 tree_root = build_real (type, dconstroot);
7606 return build_call_expr (powfn, 2, arg0, tree_root);
7610 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7611 if (BUILTIN_CBRT_P (fcode))
7613 tree arg0 = CALL_EXPR_ARG (arg, 0);
7614 if (tree_expr_nonnegative_p (arg0))
7616 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7618 if (powfn)
7620 tree tree_root;
7621 REAL_VALUE_TYPE dconstroot;
7623 real_arithmetic (&dconstroot, MULT_EXPR,
7624 get_real_const (rv_third),
7625 get_real_const (rv_third));
7626 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7627 tree_root = build_real (type, dconstroot);
7628 return build_call_expr (powfn, 2, arg0, tree_root);
7633 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7634 if (fcode == BUILT_IN_POW
7635 || fcode == BUILT_IN_POWF
7636 || fcode == BUILT_IN_POWL)
7638 tree arg00 = CALL_EXPR_ARG (arg, 0);
7639 tree arg01 = CALL_EXPR_ARG (arg, 1);
7640 if (tree_expr_nonnegative_p (arg00))
7642 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7643 const REAL_VALUE_TYPE dconstroot
7644 = real_value_truncate (TYPE_MODE (type),
7645 *get_real_const (rv_third));
7646 tree narg01 = fold_build2 (MULT_EXPR, type, arg01,
7647 build_real (type, dconstroot));
7648 return build_call_expr (powfn, 2, arg00, narg01);
7652 return NULL_TREE;
7655 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7656 TYPE is the type of the return value. Return NULL_TREE if no
7657 simplification can be made. */
7659 static tree
7660 fold_builtin_cos (tree arg, tree type, tree fndecl)
7662 tree res, narg;
7664 if (!validate_arg (arg, REAL_TYPE))
7665 return NULL_TREE;
7667 /* Calculate the result when the argument is a constant. */
7668 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7669 return res;
7671 /* Optimize cos(-x) into cos (x). */
7672 if ((narg = fold_strip_sign_ops (arg)))
7673 return build_call_expr (fndecl, 1, narg);
7675 return NULL_TREE;
7678 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7679 Return NULL_TREE if no simplification can be made. */
7681 static tree
7682 fold_builtin_cosh (tree arg, tree type, tree fndecl)
7684 if (validate_arg (arg, REAL_TYPE))
7686 tree res, narg;
7688 /* Calculate the result when the argument is a constant. */
7689 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7690 return res;
7692 /* Optimize cosh(-x) into cosh (x). */
7693 if ((narg = fold_strip_sign_ops (arg)))
7694 return build_call_expr (fndecl, 1, narg);
7697 return NULL_TREE;
7700 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7701 Return NULL_TREE if no simplification can be made. */
7703 static tree
7704 fold_builtin_tan (tree arg, tree type)
7706 enum built_in_function fcode;
7707 tree res;
7709 if (!validate_arg (arg, REAL_TYPE))
7710 return NULL_TREE;
7712 /* Calculate the result when the argument is a constant. */
7713 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7714 return res;
7716 /* Optimize tan(atan(x)) = x. */
7717 fcode = builtin_mathfn_code (arg);
7718 if (flag_unsafe_math_optimizations
7719 && (fcode == BUILT_IN_ATAN
7720 || fcode == BUILT_IN_ATANF
7721 || fcode == BUILT_IN_ATANL))
7722 return CALL_EXPR_ARG (arg, 0);
7724 return NULL_TREE;
7727 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7728 NULL_TREE if no simplification can be made. */
7730 static tree
7731 fold_builtin_sincos (tree arg0, tree arg1, tree arg2)
7733 tree type;
7734 tree res, fn, call;
7736 if (!validate_arg (arg0, REAL_TYPE)
7737 || !validate_arg (arg1, POINTER_TYPE)
7738 || !validate_arg (arg2, POINTER_TYPE))
7739 return NULL_TREE;
7741 type = TREE_TYPE (arg0);
7743 /* Calculate the result when the argument is a constant. */
7744 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7745 return res;
7747 /* Canonicalize sincos to cexpi. */
7748 if (!TARGET_C99_FUNCTIONS)
7749 return NULL_TREE;
7750 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7751 if (!fn)
7752 return NULL_TREE;
7754 call = build_call_expr (fn, 1, arg0);
7755 call = builtin_save_expr (call);
7757 return build2 (COMPOUND_EXPR, type,
7758 build2 (MODIFY_EXPR, void_type_node,
7759 build_fold_indirect_ref (arg1),
7760 build1 (IMAGPART_EXPR, type, call)),
7761 build2 (MODIFY_EXPR, void_type_node,
7762 build_fold_indirect_ref (arg2),
7763 build1 (REALPART_EXPR, type, call)));
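/* Illustrative sketch (not from builtins.c): a portable analogue of the
   cexpi form produced above.  GCC's internal cexpi (x) computes
   cos (x) + i*sin (x), i.e. cexp (I*x).  */
#include <complex.h>

void
sincos_like (double x, double *sinp, double *cosp)
{
  double _Complex t = cexp (I * x);
  *sinp = cimag (t);   /* sine is the imaginary part */
  *cosp = creal (t);   /* cosine is the real part */
}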
7766 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7767 NULL_TREE if no simplification can be made. */
7769 static tree
7770 fold_builtin_cexp (tree arg0, tree type)
7772 tree rtype;
7773 tree realp, imagp, ifn;
7775 if (!validate_arg (arg0, COMPLEX_TYPE))
7776 return NULL_TREE;
7778 rtype = TREE_TYPE (TREE_TYPE (arg0));
7780 /* In case we can figure out the real part of arg0, and it is constant
7781 zero, fold to cexpi. */
7782 if (!TARGET_C99_FUNCTIONS)
7783 return NULL_TREE;
7784 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7785 if (!ifn)
7786 return NULL_TREE;
7788 if ((realp = fold_unary (REALPART_EXPR, rtype, arg0))
7789 && real_zerop (realp))
7791 tree narg = fold_build1 (IMAGPART_EXPR, rtype, arg0);
7792 return build_call_expr (ifn, 1, narg);
7795 /* In case we can easily decompose the real and imaginary parts, split
7796 cexp into exp (r) * cexpi (i). */
7797 if (flag_unsafe_math_optimizations
7798 && realp)
7800 tree rfn, rcall, icall;
7802 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7803 if (!rfn)
7804 return NULL_TREE;
7806 imagp = fold_unary (IMAGPART_EXPR, rtype, arg0);
7807 if (!imagp)
7808 return NULL_TREE;
7810 icall = build_call_expr (ifn, 1, imagp);
7811 icall = builtin_save_expr (icall);
7812 rcall = build_call_expr (rfn, 1, realp);
7813 rcall = builtin_save_expr (rcall);
7814 return fold_build2 (COMPLEX_EXPR, type,
7815 fold_build2 (MULT_EXPR, rtype,
7816 rcall,
7817 fold_build1 (REALPART_EXPR, rtype, icall)),
7818 fold_build2 (MULT_EXPR, rtype,
7819 rcall,
7820 fold_build1 (IMAGPART_EXPR, rtype, icall)));
7823 return NULL_TREE;
7826 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7827 Return NULL_TREE if no simplification can be made. */
7829 static tree
7830 fold_builtin_trunc (tree fndecl, tree arg)
7832 if (!validate_arg (arg, REAL_TYPE))
7833 return NULL_TREE;
7835 /* Optimize trunc of constant value. */
7836 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7838 REAL_VALUE_TYPE r, x;
7839 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7841 x = TREE_REAL_CST (arg);
7842 real_trunc (&r, TYPE_MODE (type), &x);
7843 return build_real (type, r);
7846 return fold_trunc_transparent_mathfn (fndecl, arg);
7849 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7850 Return NULL_TREE if no simplification can be made. */
7852 static tree
7853 fold_builtin_floor (tree fndecl, tree arg)
7855 if (!validate_arg (arg, REAL_TYPE))
7856 return NULL_TREE;
7858 /* Optimize floor of constant value. */
7859 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7861 REAL_VALUE_TYPE x;
7863 x = TREE_REAL_CST (arg);
7864 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7866 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7867 REAL_VALUE_TYPE r;
7869 real_floor (&r, TYPE_MODE (type), &x);
7870 return build_real (type, r);
7874 /* Fold floor (x) where x is nonnegative to trunc (x). */
7875 if (tree_expr_nonnegative_p (arg))
7877 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7878 if (truncfn)
7879 return build_call_expr (truncfn, 1, arg);
7882 return fold_trunc_transparent_mathfn (fndecl, arg);
7885 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7886 Return NULL_TREE if no simplification can be made. */
7888 static tree
7889 fold_builtin_ceil (tree fndecl, tree arg)
7891 if (!validate_arg (arg, REAL_TYPE))
7892 return NULL_TREE;
7894 /* Optimize ceil of constant value. */
7895 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7897 REAL_VALUE_TYPE x;
7899 x = TREE_REAL_CST (arg);
7900 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7902 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7903 REAL_VALUE_TYPE r;
7905 real_ceil (&r, TYPE_MODE (type), &x);
7906 return build_real (type, r);
7910 return fold_trunc_transparent_mathfn (fndecl, arg);
7913 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7914 Return NULL_TREE if no simplification can be made. */
7916 static tree
7917 fold_builtin_round (tree fndecl, tree arg)
7919 if (!validate_arg (arg, REAL_TYPE))
7920 return NULL_TREE;
7922 /* Optimize round of constant value. */
7923 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7925 REAL_VALUE_TYPE x;
7927 x = TREE_REAL_CST (arg);
7928 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7930 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7931 REAL_VALUE_TYPE r;
7933 real_round (&r, TYPE_MODE (type), &x);
7934 return build_real (type, r);
7938 return fold_trunc_transparent_mathfn (fndecl, arg);
7941 /* Fold function call to builtin lround, lroundf or lroundl (or the
7942 corresponding long long versions) and other rounding functions. ARG
7943 is the argument to the call. Return NULL_TREE if no simplification
7944 can be made. */
7946 static tree
7947 fold_builtin_int_roundingfn (tree fndecl, tree arg)
7949 if (!validate_arg (arg, REAL_TYPE))
7950 return NULL_TREE;
7952 /* Optimize lround of constant value. */
7953 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7955 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7957 if (real_isfinite (&x))
7959 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
7960 tree ftype = TREE_TYPE (arg);
7961 unsigned HOST_WIDE_INT lo2;
7962 HOST_WIDE_INT hi, lo;
7963 REAL_VALUE_TYPE r;
7965 switch (DECL_FUNCTION_CODE (fndecl))
7967 CASE_FLT_FN (BUILT_IN_LFLOOR):
7968 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7969 real_floor (&r, TYPE_MODE (ftype), &x);
7970 break;
7972 CASE_FLT_FN (BUILT_IN_LCEIL):
7973 CASE_FLT_FN (BUILT_IN_LLCEIL):
7974 real_ceil (&r, TYPE_MODE (ftype), &x);
7975 break;
7977 CASE_FLT_FN (BUILT_IN_LROUND):
7978 CASE_FLT_FN (BUILT_IN_LLROUND):
7979 real_round (&r, TYPE_MODE (ftype), &x);
7980 break;
7982 default:
7983 gcc_unreachable ();
7986 REAL_VALUE_TO_INT (&lo, &hi, r);
7987 if (!fit_double_type (lo, hi, &lo2, &hi, itype))
7988 return build_int_cst_wide (itype, lo2, hi);
7992 switch (DECL_FUNCTION_CODE (fndecl))
7994 CASE_FLT_FN (BUILT_IN_LFLOOR):
7995 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7996 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7997 if (tree_expr_nonnegative_p (arg))
7998 return fold_build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)),
7999 arg);
8000 break;
8001 default:;
8004 return fold_fixed_mathfn (fndecl, arg);
8007 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8008 and their long and long long variants (e.g. ffsl and ffsll).  ARG is
8009 the argument to the call. Return NULL_TREE if no simplification can
8010 be made. */
8012 static tree
8013 fold_builtin_bitop (tree fndecl, tree arg)
8015 if (!validate_arg (arg, INTEGER_TYPE))
8016 return NULL_TREE;
8018 /* Optimize for constant argument. */
8019 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8021 HOST_WIDE_INT hi, width, result;
8022 unsigned HOST_WIDE_INT lo;
8023 tree type;
8025 type = TREE_TYPE (arg);
8026 width = TYPE_PRECISION (type);
8027 lo = TREE_INT_CST_LOW (arg);
8029 /* Clear all the bits that are beyond the type's precision. */
8030 if (width > HOST_BITS_PER_WIDE_INT)
8032 hi = TREE_INT_CST_HIGH (arg);
8033 if (width < 2 * HOST_BITS_PER_WIDE_INT)
8034 hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
8036 else
8038 hi = 0;
8039 if (width < HOST_BITS_PER_WIDE_INT)
8040 lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
8043 switch (DECL_FUNCTION_CODE (fndecl))
8045 CASE_INT_FN (BUILT_IN_FFS):
8046 if (lo != 0)
8047 result = exact_log2 (lo & -lo) + 1;
8048 else if (hi != 0)
8049 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
8050 else
8051 result = 0;
8052 break;
8054 CASE_INT_FN (BUILT_IN_CLZ):
8055 if (hi != 0)
8056 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8057 else if (lo != 0)
8058 result = width - floor_log2 (lo) - 1;
8059 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8060 result = width;
8061 break;
8063 CASE_INT_FN (BUILT_IN_CTZ):
8064 if (lo != 0)
8065 result = exact_log2 (lo & -lo);
8066 else if (hi != 0)
8067 result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
8068 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8069 result = width;
8070 break;
8072 CASE_INT_FN (BUILT_IN_POPCOUNT):
8073 result = 0;
8074 while (lo)
8075 result++, lo &= lo - 1;
8076 while (hi)
8077 result++, hi &= hi - 1;
8078 break;
8080 CASE_INT_FN (BUILT_IN_PARITY):
8081 result = 0;
8082 while (lo)
8083 result++, lo &= lo - 1;
8084 while (hi)
8085 result++, hi &= hi - 1;
8086 result &= 1;
8087 break;
8089 default:
8090 gcc_unreachable ();
8093 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8096 return NULL_TREE;
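/* Illustrative sketch (not from builtins.c): the constant-folding loops
   above rely on the classic "clear the lowest set bit" idiom; a standalone
   version over a single 64-bit word.  */
static int
popcount64 (unsigned long long x)
{
  int n = 0;
  while (x)
    {
      x &= x - 1;   /* clear the lowest set bit */
      n++;
    }
  return n;         /* parity of the original value is n & 1 */
}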
8099 /* Fold function call to builtin_bswap and the long and long long
8100 variants. Return NULL_TREE if no simplification can be made. */
8101 static tree
8102 fold_builtin_bswap (tree fndecl, tree arg)
8104 if (! validate_arg (arg, INTEGER_TYPE))
8105 return NULL_TREE;
8107 /* Optimize constant value. */
8108 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8110 HOST_WIDE_INT hi, width, r_hi = 0;
8111 unsigned HOST_WIDE_INT lo, r_lo = 0;
8112 tree type;
8114 type = TREE_TYPE (arg);
8115 width = TYPE_PRECISION (type);
8116 lo = TREE_INT_CST_LOW (arg);
8117 hi = TREE_INT_CST_HIGH (arg);
8119 switch (DECL_FUNCTION_CODE (fndecl))
8121 case BUILT_IN_BSWAP32:
8122 case BUILT_IN_BSWAP64:
8124 int s;
8126 for (s = 0; s < width; s += 8)
8128 int d = width - s - 8;
8129 unsigned HOST_WIDE_INT byte;
8131 if (s < HOST_BITS_PER_WIDE_INT)
8132 byte = (lo >> s) & 0xff;
8133 else
8134 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8136 if (d < HOST_BITS_PER_WIDE_INT)
8137 r_lo |= byte << d;
8138 else
8139 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8143 break;
8145 default:
8146 gcc_unreachable ();
8149 if (width < HOST_BITS_PER_WIDE_INT)
8150 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), r_lo);
8151 else
8152 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl)), r_lo, r_hi);
8155 return NULL_TREE;
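/* Illustrative sketch (not from builtins.c): the byte-reversal loop above,
   written for a plain 32-bit value instead of the two-word CST
   representation (assumes a 32-bit unsigned int).  */
static unsigned int
bswap32_example (unsigned int x)
{
  unsigned int r = 0;
  int s;
  for (s = 0; s < 32; s += 8)
    r |= ((x >> s) & 0xffu) << (32 - s - 8);   /* byte at bit S moves to bit 32-S-8 */
  return r;
}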
8158 /* Return true if EXPR is the real constant contained in VALUE. */
8160 static bool
8161 real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
8163 STRIP_NOPS (expr);
8165 return ((TREE_CODE (expr) == REAL_CST
8166 && !TREE_OVERFLOW (expr)
8167 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
8168 || (TREE_CODE (expr) == COMPLEX_CST
8169 && real_dconstp (TREE_REALPART (expr), value)
8170 && real_zerop (TREE_IMAGPART (expr))));
8173 /* A subroutine of fold_builtin to fold the various logarithmic
8174 functions.  Return NULL_TREE if no simplification can be made.
8175 FUNC is the corresponding MPFR logarithm function. */
8177 static tree
8178 fold_builtin_logarithm (tree fndecl, tree arg,
8179 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8181 if (validate_arg (arg, REAL_TYPE))
8183 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8184 tree res;
8185 const enum built_in_function fcode = builtin_mathfn_code (arg);
8187 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8188 instead we'll look for 'e' truncated to MODE. So only do
8189 this if flag_unsafe_math_optimizations is set. */
8190 if (flag_unsafe_math_optimizations && func == mpfr_log)
8192 const REAL_VALUE_TYPE e_truncated =
8193 real_value_truncate (TYPE_MODE (type), *get_real_const (rv_e));
8194 if (real_dconstp (arg, &e_truncated))
8195 return build_real (type, dconst1);
8198 /* Calculate the result when the argument is a constant. */
8199 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8200 return res;
8202 /* Special case, optimize logN(expN(x)) = x. */
8203 if (flag_unsafe_math_optimizations
8204 && ((func == mpfr_log
8205 && (fcode == BUILT_IN_EXP
8206 || fcode == BUILT_IN_EXPF
8207 || fcode == BUILT_IN_EXPL))
8208 || (func == mpfr_log2
8209 && (fcode == BUILT_IN_EXP2
8210 || fcode == BUILT_IN_EXP2F
8211 || fcode == BUILT_IN_EXP2L))
8212 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8213 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8215 /* Optimize logN(func()) for various exponential functions. We
8216 want to determine the value "x" and the power "exponent" in
8217 order to transform logN(x**exponent) into exponent*logN(x). */
8218 if (flag_unsafe_math_optimizations)
8220 tree exponent = 0, x = 0;
8222 switch (fcode)
8224 CASE_FLT_FN (BUILT_IN_EXP):
8225 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8226 x = build_real (type,
8227 real_value_truncate (TYPE_MODE (type),
8228 *get_real_const (rv_e)));
8229 exponent = CALL_EXPR_ARG (arg, 0);
8230 break;
8231 CASE_FLT_FN (BUILT_IN_EXP2):
8232 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8233 x = build_real (type, dconst2);
8234 exponent = CALL_EXPR_ARG (arg, 0);
8235 break;
8236 CASE_FLT_FN (BUILT_IN_EXP10):
8237 CASE_FLT_FN (BUILT_IN_POW10):
8238 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8240 REAL_VALUE_TYPE dconst10;
8241 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8242 x = build_real (type, dconst10);
8244 exponent = CALL_EXPR_ARG (arg, 0);
8245 break;
8246 CASE_FLT_FN (BUILT_IN_SQRT):
8247 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8248 x = CALL_EXPR_ARG (arg, 0);
8249 exponent = build_real (type, dconsthalf);
8250 break;
8251 CASE_FLT_FN (BUILT_IN_CBRT):
8252 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8253 x = CALL_EXPR_ARG (arg, 0);
8254 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8255 *get_real_const (rv_third)));
8256 break;
8257 CASE_FLT_FN (BUILT_IN_POW):
8258 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8259 x = CALL_EXPR_ARG (arg, 0);
8260 exponent = CALL_EXPR_ARG (arg, 1);
8261 break;
8262 default:
8263 break;
8266 /* Now perform the optimization. */
8267 if (x && exponent)
8269 tree logfn = build_call_expr (fndecl, 1, x);
8270 return fold_build2 (MULT_EXPR, type, exponent, logfn);
8275 return NULL_TREE;
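/* Illustrative sketch (not from builtins.c): the logN(x**exponent) rewrite
   above at the source level, valid only under -funsafe-math-optimizations.  */
#include <math.h>

double
log_of_pow (double x, double y)
{
  /* Folded roughly to y * log (x); likewise log (sqrt (x)) becomes
     0.5 * log (x) and log (exp (x)) becomes x.  */
  return log (pow (x, y));
}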
8278 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8279 NULL_TREE if no simplification can be made. */
8281 static tree
8282 fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
8284 tree res, narg0, narg1;
8286 if (!validate_arg (arg0, REAL_TYPE)
8287 || !validate_arg (arg1, REAL_TYPE))
8288 return NULL_TREE;
8290 /* Calculate the result when the argument is a constant. */
8291 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8292 return res;
8294 /* If either argument to hypot has a negate or abs, strip that off.
8295 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8296 narg0 = fold_strip_sign_ops (arg0);
8297 narg1 = fold_strip_sign_ops (arg1);
8298 if (narg0 || narg1)
8300 return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
8301 narg1 ? narg1 : arg1);
8304 /* If either argument is zero, hypot is fabs of the other. */
8305 if (real_zerop (arg0))
8306 return fold_build1 (ABS_EXPR, type, arg1);
8307 else if (real_zerop (arg1))
8308 return fold_build1 (ABS_EXPR, type, arg0);
8310 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8311 if (flag_unsafe_math_optimizations
8312 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8314 const REAL_VALUE_TYPE sqrt2_trunc
8315 = real_value_truncate (TYPE_MODE (type), *get_real_const (rv_sqrt2));
8316 return fold_build2 (MULT_EXPR, type,
8317 fold_build1 (ABS_EXPR, type, arg0),
8318 build_real (type, sqrt2_trunc));
8321 return NULL_TREE;
8325 /* Fold a builtin function call to pow, powf, or powl. Return
8326 NULL_TREE if no simplification can be made. */
8327 static tree
8328 fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
8330 tree res;
8332 if (!validate_arg (arg0, REAL_TYPE)
8333 || !validate_arg (arg1, REAL_TYPE))
8334 return NULL_TREE;
8336 /* Calculate the result when the argument is a constant. */
8337 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8338 return res;
8340 /* Optimize pow(1.0,y) = 1.0. */
8341 if (real_onep (arg0))
8342 return omit_one_operand (type, build_real (type, dconst1), arg1);
8344 if (TREE_CODE (arg1) == REAL_CST
8345 && !TREE_OVERFLOW (arg1))
8347 REAL_VALUE_TYPE cint;
8348 REAL_VALUE_TYPE c;
8349 HOST_WIDE_INT n;
8351 c = TREE_REAL_CST (arg1);
8353 /* Optimize pow(x,0.0) = 1.0. */
8354 if (REAL_VALUES_EQUAL (c, dconst0))
8355 return omit_one_operand (type, build_real (type, dconst1),
8356 arg0);
8358 /* Optimize pow(x,1.0) = x. */
8359 if (REAL_VALUES_EQUAL (c, dconst1))
8360 return arg0;
8362 /* Optimize pow(x,-1.0) = 1.0/x. */
8363 if (REAL_VALUES_EQUAL (c, dconstm1))
8364 return fold_build2 (RDIV_EXPR, type,
8365 build_real (type, dconst1), arg0);
8367 /* Optimize pow(x,0.5) = sqrt(x). */
8368 if (flag_unsafe_math_optimizations
8369 && REAL_VALUES_EQUAL (c, dconsthalf))
8371 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8373 if (sqrtfn != NULL_TREE)
8374 return build_call_expr (sqrtfn, 1, arg0);
8377 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8378 if (flag_unsafe_math_optimizations)
8380 const REAL_VALUE_TYPE dconstroot
8381 = real_value_truncate (TYPE_MODE (type),
8382 *get_real_const (rv_third));
8384 if (REAL_VALUES_EQUAL (c, dconstroot))
8386 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8387 if (cbrtfn != NULL_TREE)
8388 return build_call_expr (cbrtfn, 1, arg0);
8392 /* Check for an integer exponent. */
8393 n = real_to_integer (&c);
8394 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8395 if (real_identical (&c, &cint))
8397 /* Attempt to evaluate pow at compile-time. */
8398 if (TREE_CODE (arg0) == REAL_CST
8399 && !TREE_OVERFLOW (arg0))
8401 REAL_VALUE_TYPE x;
8402 bool inexact;
8404 x = TREE_REAL_CST (arg0);
8405 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8406 if (flag_unsafe_math_optimizations || !inexact)
8407 return build_real (type, x);
8410 /* Strip sign ops from even integer powers. */
8411 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8413 tree narg0 = fold_strip_sign_ops (arg0);
8414 if (narg0)
8415 return build_call_expr (fndecl, 2, narg0, arg1);
8420 if (flag_unsafe_math_optimizations)
8422 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8424 /* Optimize pow(expN(x),y) = expN(x*y). */
8425 if (BUILTIN_EXPONENT_P (fcode))
8427 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8428 tree arg = CALL_EXPR_ARG (arg0, 0);
8429 arg = fold_build2 (MULT_EXPR, type, arg, arg1);
8430 return build_call_expr (expfn, 1, arg);
8433 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8434 if (BUILTIN_SQRT_P (fcode))
8436 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8437 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8438 build_real (type, dconsthalf));
8439 return build_call_expr (fndecl, 2, narg0, narg1);
8442 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8443 if (BUILTIN_CBRT_P (fcode))
8445 tree arg = CALL_EXPR_ARG (arg0, 0);
8446 if (tree_expr_nonnegative_p (arg))
8448 const REAL_VALUE_TYPE dconstroot
8449 = real_value_truncate (TYPE_MODE (type),
8450 *get_real_const (rv_third));
8451 tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
8452 build_real (type, dconstroot));
8453 return build_call_expr (fndecl, 2, arg, narg1);
8457 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8458 if (fcode == BUILT_IN_POW
8459 || fcode == BUILT_IN_POWF
8460 || fcode == BUILT_IN_POWL)
8462 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8463 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8464 tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
8465 return build_call_expr (fndecl, 2, arg00, narg1);
8469 return NULL_TREE;
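/* Illustrative sketch (not from builtins.c): the special exponents handled
   above.  */
#include <math.h>

double
pow_half (double x)
{
  /* With -funsafe-math-optimizations, folded roughly to sqrt (x);
     pow (x, -1.0) becomes 1.0 / x and pow (x, 1.0) becomes x even
     without that flag.  */
  return pow (x, 0.5);
}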
8472 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8473 Return NULL_TREE if no simplification can be made. */
8474 static tree
8475 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
8476 tree arg0, tree arg1, tree type)
8478 if (!validate_arg (arg0, REAL_TYPE)
8479 || !validate_arg (arg1, INTEGER_TYPE))
8480 return NULL_TREE;
8482 /* Optimize pow(1.0,y) = 1.0. */
8483 if (real_onep (arg0))
8484 return omit_one_operand (type, build_real (type, dconst1), arg1);
8486 if (host_integerp (arg1, 0))
8488 HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
8490 /* Evaluate powi at compile-time. */
8491 if (TREE_CODE (arg0) == REAL_CST
8492 && !TREE_OVERFLOW (arg0))
8494 REAL_VALUE_TYPE x;
8495 x = TREE_REAL_CST (arg0);
8496 real_powi (&x, TYPE_MODE (type), &x, c);
8497 return build_real (type, x);
8500 /* Optimize pow(x,0) = 1.0. */
8501 if (c == 0)
8502 return omit_one_operand (type, build_real (type, dconst1),
8503 arg0);
8505 /* Optimize pow(x,1) = x. */
8506 if (c == 1)
8507 return arg0;
8509 /* Optimize pow(x,-1) = 1.0/x. */
8510 if (c == -1)
8511 return fold_build2 (RDIV_EXPR, type,
8512 build_real (type, dconst1), arg0);
8515 return NULL_TREE;
8518 /* A subroutine of fold_builtin to fold the various exponent
8519 functions. Return NULL_TREE if no simplification can be made.
8520 FUNC is the corresponding MPFR exponent function. */
8522 static tree
8523 fold_builtin_exponent (tree fndecl, tree arg,
8524 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8526 if (validate_arg (arg, REAL_TYPE))
8528 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8529 tree res;
8531 /* Calculate the result when the argument is a constant. */
8532 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8533 return res;
8535 /* Optimize expN(logN(x)) = x. */
8536 if (flag_unsafe_math_optimizations)
8538 const enum built_in_function fcode = builtin_mathfn_code (arg);
8540 if ((func == mpfr_exp
8541 && (fcode == BUILT_IN_LOG
8542 || fcode == BUILT_IN_LOGF
8543 || fcode == BUILT_IN_LOGL))
8544 || (func == mpfr_exp2
8545 && (fcode == BUILT_IN_LOG2
8546 || fcode == BUILT_IN_LOG2F
8547 || fcode == BUILT_IN_LOG2L))
8548 || (func == mpfr_exp10
8549 && (fcode == BUILT_IN_LOG10
8550 || fcode == BUILT_IN_LOG10F
8551 || fcode == BUILT_IN_LOG10L)))
8552 return fold_convert (type, CALL_EXPR_ARG (arg, 0));
8556 return NULL_TREE;
8559 /* Return true if VAR is a VAR_DECL or a component thereof. */
8561 static bool
8562 var_decl_component_p (tree var)
8564 tree inner = var;
8565 while (handled_component_p (inner))
8566 inner = TREE_OPERAND (inner, 0);
8567 return SSA_VAR_P (inner);
8570 /* Fold function call to builtin memset. Return
8571 NULL_TREE if no simplification can be made. */
8573 static tree
8574 fold_builtin_memset (tree dest, tree c, tree len, tree type, bool ignore)
8576 tree var, ret;
8577 unsigned HOST_WIDE_INT length, cval;
8579 if (! validate_arg (dest, POINTER_TYPE)
8580 || ! validate_arg (c, INTEGER_TYPE)
8581 || ! validate_arg (len, INTEGER_TYPE))
8582 return NULL_TREE;
8584 if (! host_integerp (len, 1))
8585 return NULL_TREE;
8587 /* If the LEN parameter is zero, return DEST. */
8588 if (integer_zerop (len))
8589 return omit_one_operand (type, dest, c);
8591 if (! host_integerp (c, 1) || TREE_SIDE_EFFECTS (dest))
8592 return NULL_TREE;
8594 var = dest;
8595 STRIP_NOPS (var);
8596 if (TREE_CODE (var) != ADDR_EXPR)
8597 return NULL_TREE;
8599 var = TREE_OPERAND (var, 0);
8600 if (TREE_THIS_VOLATILE (var))
8601 return NULL_TREE;
8603 if (!INTEGRAL_TYPE_P (TREE_TYPE (var))
8604 && !POINTER_TYPE_P (TREE_TYPE (var)))
8605 return NULL_TREE;
8607 if (! var_decl_component_p (var))
8608 return NULL_TREE;
8610 length = tree_low_cst (len, 1);
8611 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var))) != length
8612 || get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT
8613 < (int) length)
8614 return NULL_TREE;
8616 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8617 return NULL_TREE;
8619 if (integer_zerop (c))
8620 cval = 0;
8621 else
8623 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8624 return NULL_TREE;
8626 cval = tree_low_cst (c, 1);
8627 cval &= 0xff;
8628 cval |= cval << 8;
8629 cval |= cval << 16;
8630 cval |= (cval << 31) << 1;
8633 ret = build_int_cst_type (TREE_TYPE (var), cval);
8634 ret = build2 (MODIFY_EXPR, TREE_TYPE (var), var, ret);
8635 if (ignore)
8636 return ret;
8638 return omit_one_operand (type, dest, ret);
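/* Illustrative sketch (not from builtins.c): the byte replication above,
   which builds a word whose bytes all equal C.  The split (cval << 31) << 1
   avoids an undefined shift by 32 when the host word is only 32 bits wide.  */
static unsigned long long
repeat_byte (unsigned char c)
{
  unsigned long long cval = c;
  cval |= cval << 8;
  cval |= cval << 16;
  cval |= (cval << 31) << 1;   /* i.e. cval << 32 on a 64-bit word */
  return cval;                 /* 0x4141414141414141 for c == 'A' */
}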
8641 /* Fold function call to builtin bzero.  Return
8642 NULL_TREE if no simplification can be made. */
8644 static tree
8645 fold_builtin_bzero (tree dest, tree size, bool ignore)
8647 if (! validate_arg (dest, POINTER_TYPE)
8648 || ! validate_arg (size, INTEGER_TYPE))
8649 return NULL_TREE;
8651 if (!ignore)
8652 return NULL_TREE;
8654 /* New argument list transforming bzero(ptr x, int y) to
8655 memset(ptr x, int 0, size_t y). This is done this way
8656 so that if it isn't expanded inline, we fall back to
8657 calling bzero instead of memset. */
8659 return fold_builtin_memset (dest, integer_zero_node,
8660 fold_convert (sizetype, size),
8661 void_type_node, ignore);
8664 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8665 NULL_TREE if no simplification can be made.
8666 If ENDP is 0, return DEST (like memcpy).
8667 If ENDP is 1, return DEST+LEN (like mempcpy).
8668 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8669 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8670 (memmove). */
8672 static tree
8673 fold_builtin_memory_op (tree dest, tree src, tree len, tree type, bool ignore, int endp)
8675 tree destvar, srcvar, expr;
8677 if (! validate_arg (dest, POINTER_TYPE)
8678 || ! validate_arg (src, POINTER_TYPE)
8679 || ! validate_arg (len, INTEGER_TYPE))
8680 return NULL_TREE;
8682 /* If the LEN parameter is zero, return DEST. */
8683 if (integer_zerop (len))
8684 return omit_one_operand (type, dest, src);
8686 /* If SRC and DEST are the same (and not volatile), return
8687 DEST{,+LEN,+LEN-1}. */
8688 if (operand_equal_p (src, dest, 0))
8689 expr = len;
8690 else
8692 tree srctype, desttype;
8693 if (endp == 3)
8695 int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
8696 int dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
8698 /* Both DEST and SRC must be pointer types.
8699 ??? This is what old code did. Is the testing for pointer types
8700 really mandatory?
8702 If either SRC is readonly or length is 1, we can use memcpy. */
8703 if (dest_align && src_align
8704 && (readonly_data_expr (src)
8705 || (host_integerp (len, 1)
8706 && (MIN (src_align, dest_align) / BITS_PER_UNIT >=
8707 tree_low_cst (len, 1)))))
8709 tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8710 if (!fn)
8711 return NULL_TREE;
8712 return build_call_expr (fn, 3, dest, src, len);
8714 return NULL_TREE;
8717 if (!host_integerp (len, 0))
8718 return NULL_TREE;
8719 /* FIXME:
8720 This logic loses for arguments like (type *)malloc (sizeof (type)),
8721 since we strip the casts from the VOID return value of malloc.
8722 Perhaps we ought to inherit type from non-VOID argument here? */
8723 STRIP_NOPS (src);
8724 STRIP_NOPS (dest);
8725 srctype = TREE_TYPE (TREE_TYPE (src));
8726 desttype = TREE_TYPE (TREE_TYPE (dest));
8727 if (!srctype || !desttype
8728 || !TYPE_SIZE_UNIT (srctype)
8729 || !TYPE_SIZE_UNIT (desttype)
8730 || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
8731 || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
8732 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
8733 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8734 return NULL_TREE;
8736 if (get_pointer_alignment (dest, BIGGEST_ALIGNMENT)
8737 < (int) TYPE_ALIGN (desttype)
8738 || (get_pointer_alignment (src, BIGGEST_ALIGNMENT)
8739 < (int) TYPE_ALIGN (srctype)))
8740 return NULL_TREE;
8742 if (!ignore)
8743 dest = builtin_save_expr (dest);
8745 srcvar = build_fold_indirect_ref (src);
8746 if (TREE_THIS_VOLATILE (srcvar))
8747 return NULL_TREE;
8748 if (!tree_int_cst_equal (lang_hooks.expr_size (srcvar), len))
8749 return NULL_TREE;
8750 /* With memcpy, it is possible to bypass aliasing rules, so without
8751 this check, e.g. execute/20060930-2.c would be misoptimized, because
8752 it uses a conflicting alias set to hold the argument for the memcpy call.
8753 This check is probably unnecessary with -fno-strict-aliasing.
8754 Similarly for destvar. See also PR29286. */
8755 if (!var_decl_component_p (srcvar)
8756 /* Accept: memcpy (*char_var, "test", 1); that simplifies
8757 to char_var='t'; */
8758 || is_gimple_min_invariant (srcvar)
8759 || readonly_data_expr (src))
8760 return NULL_TREE;
8762 destvar = build_fold_indirect_ref (dest);
8763 if (TREE_THIS_VOLATILE (destvar))
8764 return NULL_TREE;
8765 if (!tree_int_cst_equal (lang_hooks.expr_size (destvar), len))
8766 return NULL_TREE;
8767 if (!var_decl_component_p (destvar))
8768 return NULL_TREE;
8770 if (srctype == desttype
8771 || (gimple_in_ssa_p (cfun)
8772 && useless_type_conversion_p (desttype, srctype)))
8773 expr = srcvar;
8774 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
8775 || POINTER_TYPE_P (TREE_TYPE (srcvar)))
8776 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
8777 || POINTER_TYPE_P (TREE_TYPE (destvar))))
8778 expr = fold_convert (TREE_TYPE (destvar), srcvar);
8779 else
8780 expr = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (destvar), srcvar);
8781 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
8784 if (ignore)
8785 return expr;
8787 if (endp == 0 || endp == 3)
8788 return omit_one_operand (type, dest, expr);
8790 if (expr == len)
8791 expr = NULL_TREE;
8793 if (endp == 2)
8794 len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
8795 ssize_int (1));
8797 dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
8798 dest = fold_convert (type, dest);
8799 if (expr)
8800 dest = omit_one_operand (type, dest, expr);
8801 return dest;
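/* Illustrative sketch (not from builtins.c): what the ENDP convention above
   means for the caller.  mempcpy is a GNU extension, so this assumes a
   glibc-style libc.  */
#define _GNU_SOURCE 1
#include <string.h>

char *
endp_demo (char *dest, const char *src)
{
  char *p = memcpy (dest, src, 4);    /* ENDP 0 (and 3): returns DEST */
  char *q = mempcpy (dest, src, 4);   /* ENDP 1: returns DEST + 4 */
  /* An stpcpy-style copy (ENDP 2) would return DEST + 4 - 1.  */
  return q - 4 == p ? p : NULL;
}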
8804 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8805 If LEN is not NULL, it represents the length of the string to be
8806 copied. Return NULL_TREE if no simplification can be made. */
8808 tree
8809 fold_builtin_strcpy (tree fndecl, tree dest, tree src, tree len)
8811 tree fn;
8813 if (!validate_arg (dest, POINTER_TYPE)
8814 || !validate_arg (src, POINTER_TYPE))
8815 return NULL_TREE;
8817 /* If SRC and DEST are the same (and not volatile), return DEST. */
8818 if (operand_equal_p (src, dest, 0))
8819 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
8821 if (optimize_size)
8822 return NULL_TREE;
8824 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8825 if (!fn)
8826 return NULL_TREE;
8828 if (!len)
8830 len = c_strlen (src, 1);
8831 if (! len || TREE_SIDE_EFFECTS (len))
8832 return NULL_TREE;
8835 len = size_binop (PLUS_EXPR, len, ssize_int (1));
8836 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8837 build_call_expr (fn, 3, dest, src, len));
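/* Illustrative sketch (not from builtins.c): the strcpy rewrite above when
   the source length is known (and we are not optimizing for size).  */
#include <string.h>

void
copy_known (char *buf)
{
  /* Folded roughly to memcpy (buf, "abc", 4) -- strlen ("abc") + 1 bytes,
     including the terminating NUL.  */
  strcpy (buf, "abc");
}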
8840 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8841 If SLEN is not NULL, it represents the length of the source string.
8842 Return NULL_TREE if no simplification can be made. */
8844 tree
8845 fold_builtin_strncpy (tree fndecl, tree dest, tree src, tree len, tree slen)
8847 tree fn;
8849 if (!validate_arg (dest, POINTER_TYPE)
8850 || !validate_arg (src, POINTER_TYPE)
8851 || !validate_arg (len, INTEGER_TYPE))
8852 return NULL_TREE;
8854 /* If the LEN parameter is zero, return DEST. */
8855 if (integer_zerop (len))
8856 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
8858 /* We can't compare slen with len as constants below if len is not a
8859 constant. */
8860 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
8861 return NULL_TREE;
8863 if (!slen)
8864 slen = c_strlen (src, 1);
8866 /* Now, we must be passed a constant src ptr parameter. */
8867 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
8868 return NULL_TREE;
8870 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
8872 /* We do not support simplification of this case, though we do
8873 support it when expanding trees into RTL. */
8874 /* FIXME: generate a call to __builtin_memset. */
8875 if (tree_int_cst_lt (slen, len))
8876 return NULL_TREE;
8878 /* OK transform into builtin memcpy. */
8879 fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
8880 if (!fn)
8881 return NULL_TREE;
8882 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
8883 build_call_expr (fn, 3, dest, src, len));
8886 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8887 arguments to the call, and TYPE is its return type.
8888 Return NULL_TREE if no simplification can be made. */
8890 static tree
8891 fold_builtin_memchr (tree arg1, tree arg2, tree len, tree type)
8893 if (!validate_arg (arg1, POINTER_TYPE)
8894 || !validate_arg (arg2, INTEGER_TYPE)
8895 || !validate_arg (len, INTEGER_TYPE))
8896 return NULL_TREE;
8897 else
8899 const char *p1;
8901 if (TREE_CODE (arg2) != INTEGER_CST
8902 || !host_integerp (len, 1))
8903 return NULL_TREE;
8905 p1 = c_getstr (arg1);
8906 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8908 char c;
8909 const char *r;
8910 tree tem;
8912 if (target_char_cast (arg2, &c))
8913 return NULL_TREE;
8915 r = (char *) memchr (p1, c, tree_low_cst (len, 1));
8917 if (r == NULL)
8918 return build_int_cst (TREE_TYPE (arg1), 0);
8920 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
8921 size_int (r - p1));
8922 return fold_convert (type, tem);
8924 return NULL_TREE;
8928 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8929 Return NULL_TREE if no simplification can be made. */
8931 static tree
8932 fold_builtin_memcmp (tree arg1, tree arg2, tree len)
8934 const char *p1, *p2;
8936 if (!validate_arg (arg1, POINTER_TYPE)
8937 || !validate_arg (arg2, POINTER_TYPE)
8938 || !validate_arg (len, INTEGER_TYPE))
8939 return NULL_TREE;
8941 /* If the LEN parameter is zero, return zero. */
8942 if (integer_zerop (len))
8943 return omit_two_operands (integer_type_node, integer_zero_node,
8944 arg1, arg2);
8946 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8947 if (operand_equal_p (arg1, arg2, 0))
8948 return omit_one_operand (integer_type_node, integer_zero_node, len);
8950 p1 = c_getstr (arg1);
8951 p2 = c_getstr (arg2);
8953 /* If all arguments are constant, and the value of len is not greater
8954 than the lengths of arg1 and arg2, evaluate at compile-time. */
8955 if (host_integerp (len, 1) && p1 && p2
8956 && compare_tree_int (len, strlen (p1) + 1) <= 0
8957 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8959 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
8961 if (r > 0)
8962 return integer_one_node;
8963 else if (r < 0)
8964 return integer_minus_one_node;
8965 else
8966 return integer_zero_node;
8969 /* If len parameter is one, return an expression corresponding to
8970 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8971 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
8973 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8974 tree cst_uchar_ptr_node
8975 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8977 tree ind1 = fold_convert (integer_type_node,
8978 build1 (INDIRECT_REF, cst_uchar_node,
8979 fold_convert (cst_uchar_ptr_node,
8980 arg1)));
8981 tree ind2 = fold_convert (integer_type_node,
8982 build1 (INDIRECT_REF, cst_uchar_node,
8983 fold_convert (cst_uchar_ptr_node,
8984 arg2)));
8985 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
8988 return NULL_TREE;
8991 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8992 Return NULL_TREE if no simplification can be made. */
8994 static tree
8995 fold_builtin_strcmp (tree arg1, tree arg2)
8997 const char *p1, *p2;
8999 if (!validate_arg (arg1, POINTER_TYPE)
9000 || !validate_arg (arg2, POINTER_TYPE))
9001 return NULL_TREE;
9003 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9004 if (operand_equal_p (arg1, arg2, 0))
9005 return integer_zero_node;
9007 p1 = c_getstr (arg1);
9008 p2 = c_getstr (arg2);
9010 if (p1 && p2)
9012 const int i = strcmp (p1, p2);
9013 if (i < 0)
9014 return integer_minus_one_node;
9015 else if (i > 0)
9016 return integer_one_node;
9017 else
9018 return integer_zero_node;
9021 /* If the second arg is "", return *(const unsigned char*)arg1. */
9022 if (p2 && *p2 == '\0')
9024 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9025 tree cst_uchar_ptr_node
9026 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9028 return fold_convert (integer_type_node,
9029 build1 (INDIRECT_REF, cst_uchar_node,
9030 fold_convert (cst_uchar_ptr_node,
9031 arg1)));
9034 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9035 if (p1 && *p1 == '\0')
9037 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9038 tree cst_uchar_ptr_node
9039 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9041 tree temp = fold_convert (integer_type_node,
9042 build1 (INDIRECT_REF, cst_uchar_node,
9043 fold_convert (cst_uchar_ptr_node,
9044 arg2)));
9045 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9048 return NULL_TREE;
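/* Illustrative sketch (not from builtins.c): the empty-string cases above.  */
#include <string.h>

int
is_empty (const char *s)
{
  /* Folded roughly to (*(const unsigned char *) s) == 0, with no library
     call; strcmp ("", s) would likewise become -*(const unsigned char *) s.  */
  return strcmp (s, "") == 0;
}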
9051 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9052 Return NULL_TREE if no simplification can be made. */
9054 static tree
9055 fold_builtin_strncmp (tree arg1, tree arg2, tree len)
9057 const char *p1, *p2;
9059 if (!validate_arg (arg1, POINTER_TYPE)
9060 || !validate_arg (arg2, POINTER_TYPE)
9061 || !validate_arg (len, INTEGER_TYPE))
9062 return NULL_TREE;
9064 /* If the LEN parameter is zero, return zero. */
9065 if (integer_zerop (len))
9066 return omit_two_operands (integer_type_node, integer_zero_node,
9067 arg1, arg2);
9069 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9070 if (operand_equal_p (arg1, arg2, 0))
9071 return omit_one_operand (integer_type_node, integer_zero_node, len);
9073 p1 = c_getstr (arg1);
9074 p2 = c_getstr (arg2);
9076 if (host_integerp (len, 1) && p1 && p2)
9078 const int i = strncmp (p1, p2, tree_low_cst (len, 1));
9079 if (i > 0)
9080 return integer_one_node;
9081 else if (i < 0)
9082 return integer_minus_one_node;
9083 else
9084 return integer_zero_node;
9087 /* If the second arg is "", and the length is greater than zero,
9088 return *(const unsigned char*)arg1. */
9089 if (p2 && *p2 == '\0'
9090 && TREE_CODE (len) == INTEGER_CST
9091 && tree_int_cst_sgn (len) == 1)
9093 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9094 tree cst_uchar_ptr_node
9095 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9097 return fold_convert (integer_type_node,
9098 build1 (INDIRECT_REF, cst_uchar_node,
9099 fold_convert (cst_uchar_ptr_node,
9100 arg1)));
9103 /* If the first arg is "", and the length is greater than zero,
9104 return -*(const unsigned char*)arg2. */
9105 if (p1 && *p1 == '\0'
9106 && TREE_CODE (len) == INTEGER_CST
9107 && tree_int_cst_sgn (len) == 1)
9109 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9110 tree cst_uchar_ptr_node
9111 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9113 tree temp = fold_convert (integer_type_node,
9114 build1 (INDIRECT_REF, cst_uchar_node,
9115 fold_convert (cst_uchar_ptr_node,
9116 arg2)));
9117 return fold_build1 (NEGATE_EXPR, integer_type_node, temp);
9120 /* If len parameter is one, return an expression corresponding to
9121 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9122 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
9124 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9125 tree cst_uchar_ptr_node
9126 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9128 tree ind1 = fold_convert (integer_type_node,
9129 build1 (INDIRECT_REF, cst_uchar_node,
9130 fold_convert (cst_uchar_ptr_node,
9131 arg1)));
9132 tree ind2 = fold_convert (integer_type_node,
9133 build1 (INDIRECT_REF, cst_uchar_node,
9134 fold_convert (cst_uchar_ptr_node,
9135 arg2)));
9136 return fold_build2 (MINUS_EXPR, integer_type_node, ind1, ind2);
9139 return NULL_TREE;
9142 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9143 ARG. Return NULL_TREE if no simplification can be made. */
9145 static tree
9146 fold_builtin_signbit (tree arg, tree type)
9148 tree temp;
9150 if (!validate_arg (arg, REAL_TYPE))
9151 return NULL_TREE;
9153 /* If ARG is a compile-time constant, determine the result. */
9154 if (TREE_CODE (arg) == REAL_CST
9155 && !TREE_OVERFLOW (arg))
9157 REAL_VALUE_TYPE c;
9159 c = TREE_REAL_CST (arg);
9160 temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
9161 return fold_convert (type, temp);
9164 /* If ARG is non-negative, the result is always zero. */
9165 if (tree_expr_nonnegative_p (arg))
9166 return omit_one_operand (type, integer_zero_node, arg);
9168 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9169 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9170 return fold_build2 (LT_EXPR, type, arg,
9171 build_real (TREE_TYPE (arg), dconst0));
9173 return NULL_TREE;
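/* For illustration: the folds above turn signbit (-3.0) into the constant 1
   and signbit (2.5) into 0, and when the argument's format has no signed
   zeros they rewrite signbit (x) as the comparison x < 0.0, where x stands
   for an arbitrary REAL_TYPE operand.  */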
9176 /* Fold function call to builtin copysign, copysignf or copysignl with
9177 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9178 be made. */
9180 static tree
9181 fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
9183 tree tem;
9185 if (!validate_arg (arg1, REAL_TYPE)
9186 || !validate_arg (arg2, REAL_TYPE))
9187 return NULL_TREE;
9189 /* copysign(X,X) is X. */
9190 if (operand_equal_p (arg1, arg2, 0))
9191 return fold_convert (type, arg1);
9193 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9194 if (TREE_CODE (arg1) == REAL_CST
9195 && TREE_CODE (arg2) == REAL_CST
9196 && !TREE_OVERFLOW (arg1)
9197 && !TREE_OVERFLOW (arg2))
9199 REAL_VALUE_TYPE c1, c2;
9201 c1 = TREE_REAL_CST (arg1);
9202 c2 = TREE_REAL_CST (arg2);
9203 /* c1.sign := c2.sign. */
9204 real_copysign (&c1, &c2);
9205 return build_real (type, c1);
9208 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9209 Remember to evaluate Y for side-effects. */
9210 if (tree_expr_nonnegative_p (arg2))
9211 return omit_one_operand (type,
9212 fold_build1 (ABS_EXPR, type, arg1),
9213 arg2);
9215 /* Strip sign changing operations for the first argument. */
9216 tem = fold_strip_sign_ops (arg1);
9217 if (tem)
9218 return build_call_expr (fndecl, 2, tem, arg2);
9220 return NULL_TREE;
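/* For illustration: copysign (x, x) folds to x, copysign (3.0, -1.0) folds
   to the constant -3.0, and when the second argument is known non-negative,
   as in copysign (x, fabs (y)), the call folds to fabs (x) while the second
   argument is still evaluated for side effects.  Here x and y stand for
   arbitrary REAL_TYPE operands.  */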
9223 /* Fold a call to builtin isascii with argument ARG. */
9225 static tree
9226 fold_builtin_isascii (tree arg)
9228 if (!validate_arg (arg, INTEGER_TYPE))
9229 return NULL_TREE;
9230 else
9232 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9233 arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
9234 build_int_cst (NULL_TREE,
9235 ~ (unsigned HOST_WIDE_INT) 0x7f));
9236 return fold_build2 (EQ_EXPR, integer_type_node,
9237 arg, integer_zero_node);
9241 /* Fold a call to builtin toascii with argument ARG. */
9243 static tree
9244 fold_builtin_toascii (tree arg)
9246 if (!validate_arg (arg, INTEGER_TYPE))
9247 return NULL_TREE;
9249 /* Transform toascii(c) -> (c & 0x7f). */
9250 return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9251 build_int_cst (NULL_TREE, 0x7f));
9254 /* Fold a call to builtin isdigit with argument ARG. */
9256 static tree
9257 fold_builtin_isdigit (tree arg)
9259 if (!validate_arg (arg, INTEGER_TYPE))
9260 return NULL_TREE;
9261 else
9263 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9264 /* According to the C standard, isdigit is unaffected by locale.
9265 However, it definitely is affected by the target character set. */
9266 unsigned HOST_WIDE_INT target_digit0
9267 = lang_hooks.to_target_charset ('0');
9269 if (target_digit0 == 0)
9270 return NULL_TREE;
9272 arg = fold_convert (unsigned_type_node, arg);
9273 arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
9274 build_int_cst (unsigned_type_node, target_digit0));
9275 return fold_build2 (LE_EXPR, integer_type_node, arg,
9276 build_int_cst (unsigned_type_node, 9));
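/* Worked example of the transformation above, assuming an ASCII target
   character set where '0' is 48: isdigit (c) becomes
   (unsigned) c - 48 <= 9, so c == '7' (55) gives 7 <= 9, i.e. 1, while
   c == 'a' (97) gives 49 <= 9, i.e. 0.  */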
9280 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9282 static tree
9283 fold_builtin_fabs (tree arg, tree type)
9285 if (!validate_arg (arg, REAL_TYPE))
9286 return NULL_TREE;
9288 arg = fold_convert (type, arg);
9289 if (TREE_CODE (arg) == REAL_CST)
9290 return fold_abs_const (arg, type);
9291 return fold_build1 (ABS_EXPR, type, arg);
9294 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9296 static tree
9297 fold_builtin_abs (tree arg, tree type)
9299 if (!validate_arg (arg, INTEGER_TYPE))
9300 return NULL_TREE;
9302 arg = fold_convert (type, arg);
9303 if (TREE_CODE (arg) == INTEGER_CST)
9304 return fold_abs_const (arg, type);
9305 return fold_build1 (ABS_EXPR, type, arg);
9308 /* Fold a call to builtin fmin or fmax. */
9310 static tree
9311 fold_builtin_fmin_fmax (tree arg0, tree arg1, tree type, bool max)
9313 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9315 /* Calculate the result when both arguments are constants. */
9316 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9318 if (res)
9319 return res;
9321 /* If either argument is NaN, return the other one. Avoid the
9322 transformation if we get (and honor) a signalling NaN. Using
9323 omit_one_operand() ensures we create a non-lvalue. */
9324 if (TREE_CODE (arg0) == REAL_CST
9325 && real_isnan (&TREE_REAL_CST (arg0))
9326 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9327 || ! TREE_REAL_CST (arg0).signalling))
9328 return omit_one_operand (type, arg1, arg0);
9329 if (TREE_CODE (arg1) == REAL_CST
9330 && real_isnan (&TREE_REAL_CST (arg1))
9331 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9332 || ! TREE_REAL_CST (arg1).signalling))
9333 return omit_one_operand (type, arg0, arg1);
9335 /* Transform fmin/fmax(x,x) -> x. */
9336 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9337 return omit_one_operand (type, arg0, arg1);
9339 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9340 functions to return the numeric arg if the other one is NaN.
9341 These tree codes don't honor that, so only transform if
9342 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9343 handled, so we don't have to worry about it either. */
9344 if (flag_finite_math_only)
9345 return fold_build2 ((max ? MAX_EXPR : MIN_EXPR), type,
9346 fold_convert (type, arg0),
9347 fold_convert (type, arg1));
9349 return NULL_TREE;
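/* For illustration: fmax (x, __builtin_nan ("")) folds to x when the quiet
   NaN is honored, fmin (x, x) folds to x, and with -ffinite-math-only
   fmax (a, b) becomes MAX_EXPR <a, b>.  The names x, a and b stand for
   arbitrary REAL_TYPE operands.  */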
9352 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9354 static tree
9355 fold_builtin_carg (tree arg, tree type)
9357 if (validate_arg (arg, COMPLEX_TYPE))
9359 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9361 if (atan2_fn)
9363 tree new_arg = builtin_save_expr (arg);
9364 tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
9365 tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
9366 return build_call_expr (atan2_fn, 2, i_arg, r_arg);
9370 return NULL_TREE;
9373 /* Fold a call to builtin logb/ilogb. */
9375 static tree
9376 fold_builtin_logb (tree arg, tree rettype)
9378 if (! validate_arg (arg, REAL_TYPE))
9379 return NULL_TREE;
9381 STRIP_NOPS (arg);
9383 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9385 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9387 switch (value->cl)
9389 case rvc_nan:
9390 case rvc_inf:
9391 /* If arg is Inf or NaN and we're logb, return it. */
9392 if (TREE_CODE (rettype) == REAL_TYPE)
9393 return fold_convert (rettype, arg);
9394 /* Fall through... */
9395 case rvc_zero:
9396 /* Zero may set errno and/or raise an exception for logb; also,
9397 for ilogb we don't know FP_ILOGB0. */
9398 return NULL_TREE;
9399 case rvc_normal:
9400 /* For normal numbers, proceed iff radix == 2. In GCC,
9401 normalized significands are in the range [0.5, 1.0). We
9402 want the exponent as if they were [1.0, 2.0) so get the
9403 exponent and subtract 1. */
9404 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9405 return fold_convert (rettype, build_int_cst (NULL_TREE,
9406 REAL_EXP (value)-1));
9407 break;
9411 return NULL_TREE;
9414 /* Fold a call to builtin significand, if radix == 2. */
9416 static tree
9417 fold_builtin_significand (tree arg, tree rettype)
9419 if (! validate_arg (arg, REAL_TYPE))
9420 return NULL_TREE;
9422 STRIP_NOPS (arg);
9424 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9426 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9428 switch (value->cl)
9430 case rvc_zero:
9431 case rvc_nan:
9432 case rvc_inf:
9433 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9434 return fold_convert (rettype, arg);
9435 case rvc_normal:
9436 /* For normal numbers, proceed iff radix == 2. */
9437 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9439 REAL_VALUE_TYPE result = *value;
9440 /* In GCC, normalized significands are in the range [0.5,
9441 1.0). We want them to be [1.0, 2.0) so set the
9442 exponent to 1. */
9443 SET_REAL_EXP (&result, 1);
9444 return build_real (rettype, result);
9446 break;
9450 return NULL_TREE;
9453 /* Fold a call to builtin frexp; we can assume the base is 2. */
9455 static tree
9456 fold_builtin_frexp (tree arg0, tree arg1, tree rettype)
9458 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9459 return NULL_TREE;
9461 STRIP_NOPS (arg0);
9463 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9464 return NULL_TREE;
9466 arg1 = build_fold_indirect_ref (arg1);
9468 /* Proceed if a valid pointer type was passed in. */
9469 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9471 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9472 tree frac, exp;
9474 switch (value->cl)
9476 case rvc_zero:
9477 /* For +-0, return (*exp = 0, +-0). */
9478 exp = integer_zero_node;
9479 frac = arg0;
9480 break;
9481 case rvc_nan:
9482 case rvc_inf:
9483 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9484 return omit_one_operand (rettype, arg0, arg1);
9485 case rvc_normal:
9487 /* Since the frexp function always expects base 2, and in
9488 GCC normalized significands are already in the range
9489 [0.5, 1.0), we have exactly what frexp wants. */
9490 REAL_VALUE_TYPE frac_rvt = *value;
9491 SET_REAL_EXP (&frac_rvt, 0);
9492 frac = build_real (rettype, frac_rvt);
9493 exp = build_int_cst (NULL_TREE, REAL_EXP (value));
9495 break;
9496 default:
9497 gcc_unreachable ();
9500 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9501 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1, exp);
9502 TREE_SIDE_EFFECTS (arg1) = 1;
9503 return fold_build2 (COMPOUND_EXPR, rettype, arg1, frac);
9506 return NULL_TREE;
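/* Worked example: frexp (12.0, &e) folds to the compound expression
   (*e = 4, 0.75), since normalized significands lie in [0.5, 1.0) and
   12.0 == 0.75 * 2**4.  Here e stands for any int object whose address
   is passed as the second argument.  */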
9509 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9510 then we can assume the base is two. If it's false, then we have to
9511 check the mode of the TYPE parameter in certain cases. */
9513 static tree
9514 fold_builtin_load_exponent (tree arg0, tree arg1, tree type, bool ldexp)
9516 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9518 STRIP_NOPS (arg0);
9519 STRIP_NOPS (arg1);
9521 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9522 if (real_zerop (arg0) || integer_zerop (arg1)
9523 || (TREE_CODE (arg0) == REAL_CST
9524 && !real_isfinite (&TREE_REAL_CST (arg0))))
9525 return omit_one_operand (type, arg0, arg1);
9527 /* If both arguments are constant, then try to evaluate it. */
9528 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9529 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9530 && host_integerp (arg1, 0))
9532 /* Bound the maximum adjustment to twice the range of the
9533 mode's valid exponents. Use abs to ensure the range is
9534 positive as a sanity check. */
9535 const long max_exp_adj = 2 *
9536 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9537 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9539 /* Get the user-requested adjustment. */
9540 const HOST_WIDE_INT req_exp_adj = tree_low_cst (arg1, 0);
9542 /* The requested adjustment must be inside this range. This
9543 is a preliminary cap to avoid things like overflow; we
9544 may still fail to compute the result for other reasons. */
9545 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9547 REAL_VALUE_TYPE initial_result;
9549 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9551 /* Ensure we didn't overflow. */
9552 if (! real_isinf (&initial_result))
9554 const REAL_VALUE_TYPE trunc_result
9555 = real_value_truncate (TYPE_MODE (type), initial_result);
9557 /* Only proceed if the target mode can hold the
9558 resulting value. */
9559 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9560 return build_real (type, trunc_result);
9566 return NULL_TREE;
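/* For illustration: ldexp (x, 0) and ldexp (0.0, n) fold to their first
   argument, and ldexp (1.5, 3) folds to the constant 12.0.  scalbn and
   scalbln get the same constant folding only when the radix of TYPE's
   mode is 2.  Here x and n stand for arbitrary operands.  */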
9569 /* Fold a call to builtin modf. */
9571 static tree
9572 fold_builtin_modf (tree arg0, tree arg1, tree rettype)
9574 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9575 return NULL_TREE;
9577 STRIP_NOPS (arg0);
9579 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9580 return NULL_TREE;
9582 arg1 = build_fold_indirect_ref (arg1);
9584 /* Proceed if a valid pointer type was passed in. */
9585 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9587 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9588 REAL_VALUE_TYPE trunc, frac;
9590 switch (value->cl)
9592 case rvc_nan:
9593 case rvc_zero:
9594 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9595 trunc = frac = *value;
9596 break;
9597 case rvc_inf:
9598 /* For +-Inf, return (*arg1 = arg0, +-0). */
9599 frac = dconst0;
9600 frac.sign = value->sign;
9601 trunc = *value;
9602 break;
9603 case rvc_normal:
9604 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9605 real_trunc (&trunc, VOIDmode, value);
9606 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9607 /* If the original number was negative and already
9608 integral, then the fractional part is -0.0. */
9609 if (value->sign && frac.cl == rvc_zero)
9610 frac.sign = value->sign;
9611 break;
9614 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9615 arg1 = fold_build2 (MODIFY_EXPR, rettype, arg1,
9616 build_real (rettype, trunc));
9617 TREE_SIDE_EFFECTS (arg1) = 1;
9618 return fold_build2 (COMPOUND_EXPR, rettype, arg1,
9619 build_real (rettype, frac));
9622 return NULL_TREE;
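/* Worked example: modf (2.5, &ip) folds to (*ip = 2.0, 0.5), and
   modf (-3.0, &ip) folds to (*ip = -3.0, -0.0) because a negative value
   that is already integral leaves a fractional part of -0.0.  Here ip
   stands for an object of the same real type as the result.  */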
9625 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign or __builtin_isfinite.
9626 ARG is the argument for the call. */
9628 static tree
9629 fold_builtin_classify (tree fndecl, tree arg, int builtin_index)
9631 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9632 REAL_VALUE_TYPE r;
9634 if (!validate_arg (arg, REAL_TYPE))
9635 return NULL_TREE;
9637 switch (builtin_index)
9639 case BUILT_IN_ISINF:
9640 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9641 return omit_one_operand (type, integer_zero_node, arg);
9643 if (TREE_CODE (arg) == REAL_CST)
9645 r = TREE_REAL_CST (arg);
9646 if (real_isinf (&r))
9647 return real_compare (GT_EXPR, &r, &dconst0)
9648 ? integer_one_node : integer_minus_one_node;
9649 else
9650 return integer_zero_node;
9653 return NULL_TREE;
9655 case BUILT_IN_ISINF_SIGN:
9657 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9658 /* In a boolean context, GCC will fold the inner COND_EXPR to
9659 1. So e.g. "if (isinf_sign(x))" would be folded to just
9660 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9661 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
9662 tree isinf_fn = built_in_decls[BUILT_IN_ISINF];
9663 tree tmp = NULL_TREE;
9665 arg = builtin_save_expr (arg);
9667 if (signbit_fn && isinf_fn)
9669 tree signbit_call = build_call_expr (signbit_fn, 1, arg);
9670 tree isinf_call = build_call_expr (isinf_fn, 1, arg);
9672 signbit_call = fold_build2 (NE_EXPR, integer_type_node,
9673 signbit_call, integer_zero_node);
9674 isinf_call = fold_build2 (NE_EXPR, integer_type_node,
9675 isinf_call, integer_zero_node);
9677 tmp = fold_build3 (COND_EXPR, integer_type_node, signbit_call,
9678 integer_minus_one_node, integer_one_node);
9679 tmp = fold_build3 (COND_EXPR, integer_type_node, isinf_call, tmp,
9680 integer_zero_node);
9683 return tmp;
9686 case BUILT_IN_ISFINITE:
9687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
9688 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
9689 return omit_one_operand (type, integer_one_node, arg);
9691 if (TREE_CODE (arg) == REAL_CST)
9693 r = TREE_REAL_CST (arg);
9694 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
9697 return NULL_TREE;
9699 case BUILT_IN_ISNAN:
9700 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
9701 return omit_one_operand (type, integer_zero_node, arg);
9703 if (TREE_CODE (arg) == REAL_CST)
9705 r = TREE_REAL_CST (arg);
9706 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9709 arg = builtin_save_expr (arg);
9710 return fold_build2 (UNORDERED_EXPR, type, arg, arg);
9712 default:
9713 gcc_unreachable ();
9717 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9718 This builtin will generate code to return the appropriate floating
9719 point classification depending on the value of the floating point
9720 number passed in. The possible return values must be supplied as
9721 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9722 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9723 one floating point argument which is "type generic". */
9725 static tree
9726 fold_builtin_fpclassify (tree exp)
9728 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9729 arg, type, res, tmp;
9730 enum machine_mode mode;
9731 REAL_VALUE_TYPE r;
9732 char buf[128];
9734 /* Verify the required arguments in the original call. */
9735 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
9736 INTEGER_TYPE, INTEGER_TYPE,
9737 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
9738 return NULL_TREE;
9740 fp_nan = CALL_EXPR_ARG (exp, 0);
9741 fp_infinite = CALL_EXPR_ARG (exp, 1);
9742 fp_normal = CALL_EXPR_ARG (exp, 2);
9743 fp_subnormal = CALL_EXPR_ARG (exp, 3);
9744 fp_zero = CALL_EXPR_ARG (exp, 4);
9745 arg = CALL_EXPR_ARG (exp, 5);
9746 type = TREE_TYPE (arg);
9747 mode = TYPE_MODE (type);
9748 arg = builtin_save_expr (fold_build1 (ABS_EXPR, type, arg));
9750 /* fpclassify(x) ->
9751 isnan(x) ? FP_NAN :
9752 (fabs(x) == Inf ? FP_INFINITE :
9753 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9754 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9756 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9757 build_real (type, dconst0));
9758 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_zero, fp_subnormal);
9760 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9761 real_from_string (&r, buf);
9762 tmp = fold_build2 (GE_EXPR, integer_type_node, arg, build_real (type, r));
9763 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_normal, res);
9765 if (HONOR_INFINITIES (mode))
9767 real_inf (&r);
9768 tmp = fold_build2 (EQ_EXPR, integer_type_node, arg,
9769 build_real (type, r));
9770 res = fold_build3 (COND_EXPR, integer_type_node, tmp, fp_infinite, res);
9773 if (HONOR_NANS (mode))
9775 tmp = fold_build2 (ORDERED_EXPR, integer_type_node, arg, arg);
9776 res = fold_build3 (COND_EXPR, integer_type_node, tmp, res, fp_nan);
9779 return res;
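/* A concrete instance of the expansion above: for IEEE double,
   REAL_MODE_FORMAT (mode)->emin - 1 is -1022, so the threshold printed
   into BUF is "0x1p-1022" (DBL_MIN), and the result is the chain

     isnan (x) ? FP_NAN
     : fabs (x) == Inf ? FP_INFINITE
     : fabs (x) >= 0x1p-1022 ? FP_NORMAL
     : x == 0.0 ? FP_ZERO : FP_SUBNORMAL

   built from the innermost COND_EXPR outward.  */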
9782 /* Fold a call to an unordered comparison function such as
9783 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9784 being called and ARG0 and ARG1 are the arguments for the call.
9785 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9786 the opposite of the desired result. UNORDERED_CODE is used
9787 for modes that can hold NaNs and ORDERED_CODE is used for
9788 the rest. */
9790 static tree
9791 fold_builtin_unordered_cmp (tree fndecl, tree arg0, tree arg1,
9792 enum tree_code unordered_code,
9793 enum tree_code ordered_code)
9795 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9796 enum tree_code code;
9797 tree type0, type1;
9798 enum tree_code code0, code1;
9799 tree cmp_type = NULL_TREE;
9801 type0 = TREE_TYPE (arg0);
9802 type1 = TREE_TYPE (arg1);
9804 code0 = TREE_CODE (type0);
9805 code1 = TREE_CODE (type1);
9807 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9808 /* Choose the wider of two real types. */
9809 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9810 ? type0 : type1;
9811 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9812 cmp_type = type0;
9813 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9814 cmp_type = type1;
9816 arg0 = fold_convert (cmp_type, arg0);
9817 arg1 = fold_convert (cmp_type, arg1);
9819 if (unordered_code == UNORDERED_EXPR)
9821 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9822 return omit_two_operands (type, integer_zero_node, arg0, arg1);
9823 return fold_build2 (UNORDERED_EXPR, type, arg0, arg1);
9826 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
9827 : ordered_code;
9828 return fold_build1 (TRUTH_NOT_EXPR, type,
9829 fold_build2 (code, type, arg0, arg1));
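/* For illustration: with the folds above, isgreater (x, y) becomes
   !(x unle y) when the comparison mode honors NaNs and !(x <= y)
   otherwise, while isunordered (x, y) folds to plain 0 when NaNs are
   not honored.  Here x and y stand for arbitrary arithmetic operands.  */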
9832 /* Fold a call to built-in function FNDECL with 0 arguments.
9833 IGNORE is true if the result of the function call is ignored. This
9834 function returns NULL_TREE if no simplification was possible. */
9836 static tree
9837 fold_builtin_0 (tree fndecl, bool ignore ATTRIBUTE_UNUSED)
9839 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9840 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9841 switch (fcode)
9843 CASE_FLT_FN (BUILT_IN_INF):
9844 case BUILT_IN_INFD32:
9845 case BUILT_IN_INFD64:
9846 case BUILT_IN_INFD128:
9847 return fold_builtin_inf (type, true);
9849 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9850 return fold_builtin_inf (type, false);
9852 case BUILT_IN_CLASSIFY_TYPE:
9853 return fold_builtin_classify_type (NULL_TREE);
9855 default:
9856 break;
9858 return NULL_TREE;
9861 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9862 IGNORE is true if the result of the function call is ignored. This
9863 function returns NULL_TREE if no simplification was possible. */
9865 static tree
9866 fold_builtin_1 (tree fndecl, tree arg0, bool ignore)
9868 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9869 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9870 switch (fcode)
9873 case BUILT_IN_CONSTANT_P:
9875 tree val = fold_builtin_constant_p (arg0);
9877 /* Gimplification will pull the CALL_EXPR for the builtin out of
9878 an if condition. When not optimizing, we'll not CSE it back.
9879 To avoid regressions in the form of link errors, return false now. */
9880 if (!val && !optimize)
9881 val = integer_zero_node;
9883 return val;
9886 case BUILT_IN_CLASSIFY_TYPE:
9887 return fold_builtin_classify_type (arg0);
9889 case BUILT_IN_STRLEN:
9890 return fold_builtin_strlen (arg0);
9892 CASE_FLT_FN (BUILT_IN_FABS):
9893 return fold_builtin_fabs (arg0, type);
9895 case BUILT_IN_ABS:
9896 case BUILT_IN_LABS:
9897 case BUILT_IN_LLABS:
9898 case BUILT_IN_IMAXABS:
9899 return fold_builtin_abs (arg0, type);
9901 CASE_FLT_FN (BUILT_IN_CONJ):
9902 if (validate_arg (arg0, COMPLEX_TYPE))
9903 return fold_build1 (CONJ_EXPR, type, arg0);
9904 break;
9906 CASE_FLT_FN (BUILT_IN_CREAL):
9907 if (validate_arg (arg0, COMPLEX_TYPE))
9908 return non_lvalue (fold_build1 (REALPART_EXPR, type, arg0));
9909 break;
9911 CASE_FLT_FN (BUILT_IN_CIMAG):
9912 if (validate_arg (arg0, COMPLEX_TYPE))
9913 return non_lvalue (fold_build1 (IMAGPART_EXPR, type, arg0));
9914 break;
9916 CASE_FLT_FN (BUILT_IN_CCOS):
9917 CASE_FLT_FN (BUILT_IN_CCOSH):
9918 /* These functions are "even", i.e. f(x) == f(-x). */
9919 if (validate_arg (arg0, COMPLEX_TYPE))
9921 tree narg = fold_strip_sign_ops (arg0);
9922 if (narg)
9923 return build_call_expr (fndecl, 1, narg);
9925 break;
9927 CASE_FLT_FN (BUILT_IN_CABS):
9928 return fold_builtin_cabs (arg0, type, fndecl);
9930 CASE_FLT_FN (BUILT_IN_CARG):
9931 return fold_builtin_carg (arg0, type);
9933 CASE_FLT_FN (BUILT_IN_SQRT):
9934 return fold_builtin_sqrt (arg0, type);
9936 CASE_FLT_FN (BUILT_IN_CBRT):
9937 return fold_builtin_cbrt (arg0, type);
9939 CASE_FLT_FN (BUILT_IN_ASIN):
9940 if (validate_arg (arg0, REAL_TYPE))
9941 return do_mpfr_arg1 (arg0, type, mpfr_asin,
9942 &dconstm1, &dconst1, true);
9943 break;
9945 CASE_FLT_FN (BUILT_IN_ACOS):
9946 if (validate_arg (arg0, REAL_TYPE))
9947 return do_mpfr_arg1 (arg0, type, mpfr_acos,
9948 &dconstm1, &dconst1, true);
9949 break;
9951 CASE_FLT_FN (BUILT_IN_ATAN):
9952 if (validate_arg (arg0, REAL_TYPE))
9953 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
9954 break;
9956 CASE_FLT_FN (BUILT_IN_ASINH):
9957 if (validate_arg (arg0, REAL_TYPE))
9958 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
9959 break;
9961 CASE_FLT_FN (BUILT_IN_ACOSH):
9962 if (validate_arg (arg0, REAL_TYPE))
9963 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
9964 &dconst1, NULL, true);
9965 break;
9967 CASE_FLT_FN (BUILT_IN_ATANH):
9968 if (validate_arg (arg0, REAL_TYPE))
9969 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
9970 &dconstm1, &dconst1, false);
9971 break;
9973 CASE_FLT_FN (BUILT_IN_SIN):
9974 if (validate_arg (arg0, REAL_TYPE))
9975 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
9976 break;
9978 CASE_FLT_FN (BUILT_IN_COS):
9979 return fold_builtin_cos (arg0, type, fndecl);
9980 break;
9982 CASE_FLT_FN (BUILT_IN_TAN):
9983 return fold_builtin_tan (arg0, type);
9985 CASE_FLT_FN (BUILT_IN_CEXP):
9986 return fold_builtin_cexp (arg0, type);
9988 CASE_FLT_FN (BUILT_IN_CEXPI):
9989 if (validate_arg (arg0, REAL_TYPE))
9990 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9991 break;
9993 CASE_FLT_FN (BUILT_IN_SINH):
9994 if (validate_arg (arg0, REAL_TYPE))
9995 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
9996 break;
9998 CASE_FLT_FN (BUILT_IN_COSH):
9999 return fold_builtin_cosh (arg0, type, fndecl);
10001 CASE_FLT_FN (BUILT_IN_TANH):
10002 if (validate_arg (arg0, REAL_TYPE))
10003 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10004 break;
10006 CASE_FLT_FN (BUILT_IN_ERF):
10007 if (validate_arg (arg0, REAL_TYPE))
10008 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10009 break;
10011 CASE_FLT_FN (BUILT_IN_ERFC):
10012 if (validate_arg (arg0, REAL_TYPE))
10013 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10014 break;
10016 CASE_FLT_FN (BUILT_IN_TGAMMA):
10017 if (validate_arg (arg0, REAL_TYPE))
10018 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10019 break;
10021 CASE_FLT_FN (BUILT_IN_EXP):
10022 return fold_builtin_exponent (fndecl, arg0, mpfr_exp);
10024 CASE_FLT_FN (BUILT_IN_EXP2):
10025 return fold_builtin_exponent (fndecl, arg0, mpfr_exp2);
10027 CASE_FLT_FN (BUILT_IN_EXP10):
10028 CASE_FLT_FN (BUILT_IN_POW10):
10029 return fold_builtin_exponent (fndecl, arg0, mpfr_exp10);
10031 CASE_FLT_FN (BUILT_IN_EXPM1):
10032 if (validate_arg (arg0, REAL_TYPE))
10033 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10034 break;
10036 CASE_FLT_FN (BUILT_IN_LOG):
10037 return fold_builtin_logarithm (fndecl, arg0, mpfr_log);
10039 CASE_FLT_FN (BUILT_IN_LOG2):
10040 return fold_builtin_logarithm (fndecl, arg0, mpfr_log2);
10042 CASE_FLT_FN (BUILT_IN_LOG10):
10043 return fold_builtin_logarithm (fndecl, arg0, mpfr_log10);
10045 CASE_FLT_FN (BUILT_IN_LOG1P):
10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10048 &dconstm1, NULL, false);
10049 break;
10051 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10052 CASE_FLT_FN (BUILT_IN_J0):
10053 if (validate_arg (arg0, REAL_TYPE))
10054 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10055 NULL, NULL, 0);
10056 break;
10058 CASE_FLT_FN (BUILT_IN_J1):
10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10061 NULL, NULL, 0);
10062 break;
10064 CASE_FLT_FN (BUILT_IN_Y0):
10065 if (validate_arg (arg0, REAL_TYPE))
10066 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10067 &dconst0, NULL, false);
10068 break;
10070 CASE_FLT_FN (BUILT_IN_Y1):
10071 if (validate_arg (arg0, REAL_TYPE))
10072 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10073 &dconst0, NULL, false);
10074 break;
10075 #endif
10077 CASE_FLT_FN (BUILT_IN_NAN):
10078 case BUILT_IN_NAND32:
10079 case BUILT_IN_NAND64:
10080 case BUILT_IN_NAND128:
10081 return fold_builtin_nan (arg0, type, true);
10083 CASE_FLT_FN (BUILT_IN_NANS):
10084 return fold_builtin_nan (arg0, type, false);
10086 CASE_FLT_FN (BUILT_IN_FLOOR):
10087 return fold_builtin_floor (fndecl, arg0);
10089 CASE_FLT_FN (BUILT_IN_CEIL):
10090 return fold_builtin_ceil (fndecl, arg0);
10092 CASE_FLT_FN (BUILT_IN_TRUNC):
10093 return fold_builtin_trunc (fndecl, arg0);
10095 CASE_FLT_FN (BUILT_IN_ROUND):
10096 return fold_builtin_round (fndecl, arg0);
10098 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10099 CASE_FLT_FN (BUILT_IN_RINT):
10100 return fold_trunc_transparent_mathfn (fndecl, arg0);
10102 CASE_FLT_FN (BUILT_IN_LCEIL):
10103 CASE_FLT_FN (BUILT_IN_LLCEIL):
10104 CASE_FLT_FN (BUILT_IN_LFLOOR):
10105 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10106 CASE_FLT_FN (BUILT_IN_LROUND):
10107 CASE_FLT_FN (BUILT_IN_LLROUND):
10108 return fold_builtin_int_roundingfn (fndecl, arg0);
10110 CASE_FLT_FN (BUILT_IN_LRINT):
10111 CASE_FLT_FN (BUILT_IN_LLRINT):
10112 return fold_fixed_mathfn (fndecl, arg0);
10114 case BUILT_IN_BSWAP32:
10115 case BUILT_IN_BSWAP64:
10116 return fold_builtin_bswap (fndecl, arg0);
10118 CASE_INT_FN (BUILT_IN_FFS):
10119 CASE_INT_FN (BUILT_IN_CLZ):
10120 CASE_INT_FN (BUILT_IN_CTZ):
10121 CASE_INT_FN (BUILT_IN_POPCOUNT):
10122 CASE_INT_FN (BUILT_IN_PARITY):
10123 return fold_builtin_bitop (fndecl, arg0);
10125 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10126 return fold_builtin_signbit (arg0, type);
10128 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10129 return fold_builtin_significand (arg0, type);
10131 CASE_FLT_FN (BUILT_IN_ILOGB):
10132 CASE_FLT_FN (BUILT_IN_LOGB):
10133 return fold_builtin_logb (arg0, type);
10135 case BUILT_IN_ISASCII:
10136 return fold_builtin_isascii (arg0);
10138 case BUILT_IN_TOASCII:
10139 return fold_builtin_toascii (arg0);
10141 case BUILT_IN_ISDIGIT:
10142 return fold_builtin_isdigit (arg0);
10144 CASE_FLT_FN (BUILT_IN_FINITE):
10145 case BUILT_IN_FINITED32:
10146 case BUILT_IN_FINITED64:
10147 case BUILT_IN_FINITED128:
10148 case BUILT_IN_ISFINITE:
10149 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISFINITE);
10151 CASE_FLT_FN (BUILT_IN_ISINF):
10152 case BUILT_IN_ISINFD32:
10153 case BUILT_IN_ISINFD64:
10154 case BUILT_IN_ISINFD128:
10155 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF);
10157 case BUILT_IN_ISINF_SIGN:
10158 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISINF_SIGN);
10160 CASE_FLT_FN (BUILT_IN_ISNAN):
10161 case BUILT_IN_ISNAND32:
10162 case BUILT_IN_ISNAND64:
10163 case BUILT_IN_ISNAND128:
10164 return fold_builtin_classify (fndecl, arg0, BUILT_IN_ISNAN);
10166 case BUILT_IN_PRINTF:
10167 case BUILT_IN_PRINTF_UNLOCKED:
10168 case BUILT_IN_VPRINTF:
10169 return fold_builtin_printf (fndecl, arg0, NULL_TREE, ignore, fcode);
10171 default:
10172 break;
10175 return NULL_TREE;
10179 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10180 IGNORE is true if the result of the function call is ignored. This
10181 function returns NULL_TREE if no simplification was possible. */
10183 static tree
10184 fold_builtin_2 (tree fndecl, tree arg0, tree arg1, bool ignore)
10186 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10187 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10189 switch (fcode)
10191 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10192 CASE_FLT_FN (BUILT_IN_JN):
10193 if (validate_arg (arg0, INTEGER_TYPE)
10194 && validate_arg (arg1, REAL_TYPE))
10195 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10196 break;
10198 CASE_FLT_FN (BUILT_IN_YN):
10199 if (validate_arg (arg0, INTEGER_TYPE)
10200 && validate_arg (arg1, REAL_TYPE))
10201 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10202 &dconst0, false);
10203 break;
10205 CASE_FLT_FN (BUILT_IN_DREM):
10206 CASE_FLT_FN (BUILT_IN_REMAINDER):
10207 if (validate_arg (arg0, REAL_TYPE)
10208 && validate_arg(arg1, REAL_TYPE))
10209 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10210 break;
10212 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10213 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10214 if (validate_arg (arg0, REAL_TYPE)
10215 && validate_arg(arg1, POINTER_TYPE))
10216 return do_mpfr_lgamma_r (arg0, arg1, type);
10217 break;
10218 #endif
10220 CASE_FLT_FN (BUILT_IN_ATAN2):
10221 if (validate_arg (arg0, REAL_TYPE)
10222 && validate_arg(arg1, REAL_TYPE))
10223 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10224 break;
10226 CASE_FLT_FN (BUILT_IN_FDIM):
10227 if (validate_arg (arg0, REAL_TYPE)
10228 && validate_arg(arg1, REAL_TYPE))
10229 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10230 break;
10232 CASE_FLT_FN (BUILT_IN_HYPOT):
10233 return fold_builtin_hypot (fndecl, arg0, arg1, type);
10235 CASE_FLT_FN (BUILT_IN_LDEXP):
10236 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/true);
10237 CASE_FLT_FN (BUILT_IN_SCALBN):
10238 CASE_FLT_FN (BUILT_IN_SCALBLN):
10239 return fold_builtin_load_exponent (arg0, arg1, type, /*ldexp=*/false);
10241 CASE_FLT_FN (BUILT_IN_FREXP):
10242 return fold_builtin_frexp (arg0, arg1, type);
10244 CASE_FLT_FN (BUILT_IN_MODF):
10245 return fold_builtin_modf (arg0, arg1, type);
10247 case BUILT_IN_BZERO:
10248 return fold_builtin_bzero (arg0, arg1, ignore);
10250 case BUILT_IN_FPUTS:
10251 return fold_builtin_fputs (arg0, arg1, ignore, false, NULL_TREE);
10253 case BUILT_IN_FPUTS_UNLOCKED:
10254 return fold_builtin_fputs (arg0, arg1, ignore, true, NULL_TREE);
10256 case BUILT_IN_STRSTR:
10257 return fold_builtin_strstr (arg0, arg1, type);
10259 case BUILT_IN_STRCAT:
10260 return fold_builtin_strcat (arg0, arg1);
10262 case BUILT_IN_STRSPN:
10263 return fold_builtin_strspn (arg0, arg1);
10265 case BUILT_IN_STRCSPN:
10266 return fold_builtin_strcspn (arg0, arg1);
10268 case BUILT_IN_STRCHR:
10269 case BUILT_IN_INDEX:
10270 return fold_builtin_strchr (arg0, arg1, type);
10272 case BUILT_IN_STRRCHR:
10273 case BUILT_IN_RINDEX:
10274 return fold_builtin_strrchr (arg0, arg1, type);
10276 case BUILT_IN_STRCPY:
10277 return fold_builtin_strcpy (fndecl, arg0, arg1, NULL_TREE);
10279 case BUILT_IN_STRCMP:
10280 return fold_builtin_strcmp (arg0, arg1);
10282 case BUILT_IN_STRPBRK:
10283 return fold_builtin_strpbrk (arg0, arg1, type);
10285 case BUILT_IN_EXPECT:
10286 return fold_builtin_expect (arg0, arg1);
10288 CASE_FLT_FN (BUILT_IN_POW):
10289 return fold_builtin_pow (fndecl, arg0, arg1, type);
10291 CASE_FLT_FN (BUILT_IN_POWI):
10292 return fold_builtin_powi (fndecl, arg0, arg1, type);
10294 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10295 return fold_builtin_copysign (fndecl, arg0, arg1, type);
10297 CASE_FLT_FN (BUILT_IN_FMIN):
10298 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/false);
10300 CASE_FLT_FN (BUILT_IN_FMAX):
10301 return fold_builtin_fmin_fmax (arg0, arg1, type, /*max=*/true);
10303 case BUILT_IN_ISGREATER:
10304 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLE_EXPR, LE_EXPR);
10305 case BUILT_IN_ISGREATEREQUAL:
10306 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNLT_EXPR, LT_EXPR);
10307 case BUILT_IN_ISLESS:
10308 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGE_EXPR, GE_EXPR);
10309 case BUILT_IN_ISLESSEQUAL:
10310 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNGT_EXPR, GT_EXPR);
10311 case BUILT_IN_ISLESSGREATER:
10312 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10313 case BUILT_IN_ISUNORDERED:
10314 return fold_builtin_unordered_cmp (fndecl, arg0, arg1, UNORDERED_EXPR,
10315 NOP_EXPR);
10317 /* We do the folding for va_start in the expander. */
10318 case BUILT_IN_VA_START:
10319 break;
10321 case BUILT_IN_SPRINTF:
10322 return fold_builtin_sprintf (arg0, arg1, NULL_TREE, ignore);
10324 case BUILT_IN_OBJECT_SIZE:
10325 return fold_builtin_object_size (arg0, arg1);
10327 case BUILT_IN_PRINTF:
10328 case BUILT_IN_PRINTF_UNLOCKED:
10329 case BUILT_IN_VPRINTF:
10330 return fold_builtin_printf (fndecl, arg0, arg1, ignore, fcode);
10332 case BUILT_IN_PRINTF_CHK:
10333 case BUILT_IN_VPRINTF_CHK:
10334 if (!validate_arg (arg0, INTEGER_TYPE)
10335 || TREE_SIDE_EFFECTS (arg0))
10336 return NULL_TREE;
10337 else
10338 return fold_builtin_printf (fndecl, arg1, NULL_TREE, ignore, fcode);
10339 break;
10341 case BUILT_IN_FPRINTF:
10342 case BUILT_IN_FPRINTF_UNLOCKED:
10343 case BUILT_IN_VFPRINTF:
10344 return fold_builtin_fprintf (fndecl, arg0, arg1, NULL_TREE,
10345 ignore, fcode);
10347 default:
10348 break;
10350 return NULL_TREE;
10353 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10354 and ARG2. IGNORE is true if the result of the function call is ignored.
10355 This function returns NULL_TREE if no simplification was possible. */
10357 static tree
10358 fold_builtin_3 (tree fndecl, tree arg0, tree arg1, tree arg2, bool ignore)
10360 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10361 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10362 switch (fcode)
10365 CASE_FLT_FN (BUILT_IN_SINCOS):
10366 return fold_builtin_sincos (arg0, arg1, arg2);
10368 CASE_FLT_FN (BUILT_IN_FMA):
10369 if (validate_arg (arg0, REAL_TYPE)
10370 && validate_arg(arg1, REAL_TYPE)
10371 && validate_arg(arg2, REAL_TYPE))
10372 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
10373 break;
10375 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10376 CASE_FLT_FN (BUILT_IN_REMQUO):
10377 if (validate_arg (arg0, REAL_TYPE)
10378 && validate_arg(arg1, REAL_TYPE)
10379 && validate_arg(arg2, POINTER_TYPE))
10380 return do_mpfr_remquo (arg0, arg1, arg2);
10381 break;
10382 #endif
10384 case BUILT_IN_MEMSET:
10385 return fold_builtin_memset (arg0, arg1, arg2, type, ignore);
10387 case BUILT_IN_BCOPY:
10388 return fold_builtin_memory_op (arg1, arg0, arg2, void_type_node, true, /*endp=*/3);
10390 case BUILT_IN_MEMCPY:
10391 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/0);
10393 case BUILT_IN_MEMPCPY:
10394 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/1);
10396 case BUILT_IN_MEMMOVE:
10397 return fold_builtin_memory_op (arg0, arg1, arg2, type, ignore, /*endp=*/3);
10399 case BUILT_IN_STRNCAT:
10400 return fold_builtin_strncat (arg0, arg1, arg2);
10402 case BUILT_IN_STRNCPY:
10403 return fold_builtin_strncpy (fndecl, arg0, arg1, arg2, NULL_TREE);
10405 case BUILT_IN_STRNCMP:
10406 return fold_builtin_strncmp (arg0, arg1, arg2);
10408 case BUILT_IN_MEMCHR:
10409 return fold_builtin_memchr (arg0, arg1, arg2, type);
10411 case BUILT_IN_BCMP:
10412 case BUILT_IN_MEMCMP:
10413 return fold_builtin_memcmp (arg0, arg1, arg2);
10415 case BUILT_IN_SPRINTF:
10416 return fold_builtin_sprintf (arg0, arg1, arg2, ignore);
10418 case BUILT_IN_STRCPY_CHK:
10419 case BUILT_IN_STPCPY_CHK:
10420 return fold_builtin_stxcpy_chk (fndecl, arg0, arg1, arg2, NULL_TREE,
10421 ignore, fcode);
10423 case BUILT_IN_STRCAT_CHK:
10424 return fold_builtin_strcat_chk (fndecl, arg0, arg1, arg2);
10426 case BUILT_IN_PRINTF_CHK:
10427 case BUILT_IN_VPRINTF_CHK:
10428 if (!validate_arg (arg0, INTEGER_TYPE)
10429 || TREE_SIDE_EFFECTS (arg0))
10430 return NULL_TREE;
10431 else
10432 return fold_builtin_printf (fndecl, arg1, arg2, ignore, fcode);
10433 break;
10435 case BUILT_IN_FPRINTF:
10436 case BUILT_IN_FPRINTF_UNLOCKED:
10437 case BUILT_IN_VFPRINTF:
10438 return fold_builtin_fprintf (fndecl, arg0, arg1, arg2, ignore, fcode);
10440 case BUILT_IN_FPRINTF_CHK:
10441 case BUILT_IN_VFPRINTF_CHK:
10442 if (!validate_arg (arg1, INTEGER_TYPE)
10443 || TREE_SIDE_EFFECTS (arg1))
10444 return NULL_TREE;
10445 else
10446 return fold_builtin_fprintf (fndecl, arg0, arg2, NULL_TREE,
10447 ignore, fcode);
10449 default:
10450 break;
10452 return NULL_TREE;
10455 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10456 ARG2, and ARG3. IGNORE is true if the result of the function call is
10457 ignored. This function returns NULL_TREE if no simplification was
10458 possible. */
10460 static tree
10461 fold_builtin_4 (tree fndecl, tree arg0, tree arg1, tree arg2, tree arg3,
10462 bool ignore)
10464 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10466 switch (fcode)
10468 case BUILT_IN_MEMCPY_CHK:
10469 case BUILT_IN_MEMPCPY_CHK:
10470 case BUILT_IN_MEMMOVE_CHK:
10471 case BUILT_IN_MEMSET_CHK:
10472 return fold_builtin_memory_chk (fndecl, arg0, arg1, arg2, arg3,
10473 NULL_TREE, ignore,
10474 DECL_FUNCTION_CODE (fndecl));
10476 case BUILT_IN_STRNCPY_CHK:
10477 return fold_builtin_strncpy_chk (arg0, arg1, arg2, arg3, NULL_TREE);
10479 case BUILT_IN_STRNCAT_CHK:
10480 return fold_builtin_strncat_chk (fndecl, arg0, arg1, arg2, arg3);
10482 case BUILT_IN_FPRINTF_CHK:
10483 case BUILT_IN_VFPRINTF_CHK:
10484 if (!validate_arg (arg1, INTEGER_TYPE)
10485 || TREE_SIDE_EFFECTS (arg1))
10486 return NULL_TREE;
10487 else
10488 return fold_builtin_fprintf (fndecl, arg0, arg2, arg3,
10489 ignore, fcode);
10490 break;
10492 default:
10493 break;
10495 return NULL_TREE;
10498 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10499 arguments, where NARGS <= 4. IGNORE is true if the result of the
10500 function call is ignored. This function returns NULL_TREE if no
10501 simplification was possible. Note that this only folds builtins with
10502 fixed argument patterns. Foldings that do varargs-to-varargs
10503 transformations, or that match calls with more than 4 arguments,
10504 need to be handled with fold_builtin_varargs instead. */
10506 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10508 static tree
10509 fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
10511 tree ret = NULL_TREE;
10513 switch (nargs)
10515 case 0:
10516 ret = fold_builtin_0 (fndecl, ignore);
10517 break;
10518 case 1:
10519 ret = fold_builtin_1 (fndecl, args[0], ignore);
10520 break;
10521 case 2:
10522 ret = fold_builtin_2 (fndecl, args[0], args[1], ignore);
10523 break;
10524 case 3:
10525 ret = fold_builtin_3 (fndecl, args[0], args[1], args[2], ignore);
10526 break;
10527 case 4:
10528 ret = fold_builtin_4 (fndecl, args[0], args[1], args[2], args[3],
10529 ignore);
10530 break;
10531 default:
10532 break;
10534 if (ret)
10536 ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
10537 TREE_NO_WARNING (ret) = 1;
10538 return ret;
10540 return NULL_TREE;
10543 /* Builtins with folding operations that operate on "..." arguments
10544 need special handling; we need to store the arguments in a convenient
10545 data structure before attempting any folding. Fortunately there are
10546 only a few builtins that fall into this category. FNDECL is the
10547 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10548 result of the function call is ignored. */
10550 static tree
10551 fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
10553 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10554 tree ret = NULL_TREE;
10556 switch (fcode)
10558 case BUILT_IN_SPRINTF_CHK:
10559 case BUILT_IN_VSPRINTF_CHK:
10560 ret = fold_builtin_sprintf_chk (exp, fcode);
10561 break;
10563 case BUILT_IN_SNPRINTF_CHK:
10564 case BUILT_IN_VSNPRINTF_CHK:
10565 ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
10566 break;
10568 case BUILT_IN_FPCLASSIFY:
10569 ret = fold_builtin_fpclassify (exp);
10570 break;
10572 default:
10573 break;
10575 if (ret)
10577 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10578 TREE_NO_WARNING (ret) = 1;
10579 return ret;
10581 return NULL_TREE;
10584 /* A wrapper function for builtin folding that prevents warnings for
10585 "statement without effect" and the like, caused by removing the
10586 call node earlier than the warning is generated. */
10588 tree
10589 fold_call_expr (tree exp, bool ignore)
10591 tree ret = NULL_TREE;
10592 tree fndecl = get_callee_fndecl (exp);
10593 if (fndecl
10594 && TREE_CODE (fndecl) == FUNCTION_DECL
10595 && DECL_BUILT_IN (fndecl)
10596 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10597 yet. Defer folding until we see all the arguments
10598 (after inlining). */
10599 && !CALL_EXPR_VA_ARG_PACK (exp))
10601 int nargs = call_expr_nargs (exp);
10603 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10604 instead last argument is __builtin_va_arg_pack (). Defer folding
10605 even in that case, until arguments are finalized. */
10606 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10608 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10609 if (fndecl2
10610 && TREE_CODE (fndecl2) == FUNCTION_DECL
10611 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10612 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10613 return NULL_TREE;
10616 /* FIXME: Don't use a list in this interface. */
10617 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10618 return targetm.fold_builtin (fndecl, CALL_EXPR_ARGS (exp), ignore);
10619 else
10621 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
10623 tree *args = CALL_EXPR_ARGP (exp);
10624 ret = fold_builtin_n (fndecl, args, nargs, ignore);
10626 if (!ret)
10627 ret = fold_builtin_varargs (fndecl, exp, ignore);
10628 if (ret)
10630 /* Propagate location information from original call to
10631 expansion of builtin. Otherwise things like
10632 maybe_emit_chk_warning, that operate on the expansion
10633 of a builtin, will use the wrong location information. */
10634 if (CAN_HAVE_LOCATION_P (exp) && EXPR_HAS_LOCATION (exp))
10636 tree realret = ret;
10637 if (TREE_CODE (ret) == NOP_EXPR)
10638 realret = TREE_OPERAND (ret, 0);
10639 if (CAN_HAVE_LOCATION_P (realret)
10640 && !EXPR_HAS_LOCATION (realret))
10641 SET_EXPR_LOCATION (realret, EXPR_LOCATION (exp));
10642 return realret;
10644 return ret;
10648 return NULL_TREE;
10651 /* Conveniently construct a function call expression. FNDECL names the
10652 function to be called and ARGLIST is a TREE_LIST of arguments. */
10654 tree
10655 build_function_call_expr (tree fndecl, tree arglist)
10657 tree fntype = TREE_TYPE (fndecl);
10658 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10659 int n = list_length (arglist);
10660 tree *argarray = (tree *) alloca (n * sizeof (tree));
10661 int i;
10663 for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
10664 argarray[i] = TREE_VALUE (arglist);
10665 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10668 /* Conveniently construct a function call expression. FNDECL names the
10669 function to be called, N is the number of arguments, and the "..."
10670 parameters are the argument expressions. */
10672 tree
10673 build_call_expr (tree fndecl, int n, ...)
10675 va_list ap;
10676 tree fntype = TREE_TYPE (fndecl);
10677 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10678 tree *argarray = (tree *) alloca (n * sizeof (tree));
10679 int i;
10681 va_start (ap, n);
10682 for (i = 0; i < n; i++)
10683 argarray[i] = va_arg (ap, tree);
10684 va_end (ap);
10685 return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
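/* Usage examples from this file: fold_builtin_carg calls
   build_call_expr (atan2_fn, 2, i_arg, r_arg), and the strstr fold below
   calls build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
   in each case the variadic arguments are gathered into a stack array
   and handed to fold_builtin_call_array.  */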
10688 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10689 N arguments are passed in the array ARGARRAY. */
10691 tree
10692 fold_builtin_call_array (tree type,
10693 tree fn,
10694 int n,
10695 tree *argarray)
10697 tree ret = NULL_TREE;
10698 int i;
10699 tree exp;
10701 if (TREE_CODE (fn) == ADDR_EXPR)
10703 tree fndecl = TREE_OPERAND (fn, 0);
10704 if (TREE_CODE (fndecl) == FUNCTION_DECL
10705 && DECL_BUILT_IN (fndecl))
10707 /* If last argument is __builtin_va_arg_pack (), arguments to this
10708 function are not finalized yet. Defer folding until they are. */
10709 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10711 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10712 if (fndecl2
10713 && TREE_CODE (fndecl2) == FUNCTION_DECL
10714 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10715 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10716 return build_call_array (type, fn, n, argarray);
10718 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10720 tree arglist = NULL_TREE;
10721 for (i = n - 1; i >= 0; i--)
10722 arglist = tree_cons (NULL_TREE, argarray[i], arglist);
10723 ret = targetm.fold_builtin (fndecl, arglist, false);
10724 if (ret)
10725 return ret;
10727 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
10729 /* First try the transformations that don't require consing up
10730 an exp. */
10731 ret = fold_builtin_n (fndecl, argarray, n, false);
10732 if (ret)
10733 return ret;
10736 /* If we got this far, we need to build an exp. */
10737 exp = build_call_array (type, fn, n, argarray);
10738 ret = fold_builtin_varargs (fndecl, exp, false);
10739 return ret ? ret : exp;
10743 return build_call_array (type, fn, n, argarray);
10746 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10747 along with N new arguments specified as the "..." parameters. SKIP
10748 is the number of arguments in EXP to be omitted. This function is used
10749 to do varargs-to-varargs transformations. */
10751 static tree
10752 rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
10754 int oldnargs = call_expr_nargs (exp);
10755 int nargs = oldnargs - skip + n;
10756 tree fntype = TREE_TYPE (fndecl);
10757 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10758 tree *buffer;
10760 if (n > 0)
10762 int i, j;
10763 va_list ap;
10765 buffer = XALLOCAVEC (tree, nargs);
10766 va_start (ap, n);
10767 for (i = 0; i < n; i++)
10768 buffer[i] = va_arg (ap, tree);
10769 va_end (ap);
10770 for (j = skip; j < oldnargs; j++, i++)
10771 buffer[i] = CALL_EXPR_ARG (exp, j);
10773 else
10774 buffer = CALL_EXPR_ARGP (exp) + skip;
10776 return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
10779 /* Validate a single argument ARG against a tree code CODE representing
10780 a type. */
10782 static bool
10783 validate_arg (const_tree arg, enum tree_code code)
10785 if (!arg)
10786 return false;
10787 else if (code == POINTER_TYPE)
10788 return POINTER_TYPE_P (TREE_TYPE (arg));
10789 else if (code == INTEGER_TYPE)
10790 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10791 return code == TREE_CODE (TREE_TYPE (arg));
10794 /* This function validates the types of a function call argument list
10795 against a specified list of tree_codes. If the last specifier is a 0,
10796 that represents an ellipsis; otherwise the last specifier must be a
10797 VOID_TYPE. */
10799 bool
10800 validate_arglist (const_tree callexpr, ...)
10802 enum tree_code code;
10803 bool res = false;
10804 va_list ap;
10805 const_call_expr_arg_iterator iter;
10806 const_tree arg;
10808 va_start (ap, callexpr);
10809 init_const_call_expr_arg_iterator (callexpr, &iter);
10813 code = va_arg (ap, enum tree_code);
10814 switch (code)
10816 case 0:
10817 /* This signifies an ellipsis; any further arguments are all OK. */
10818 res = true;
10819 goto end;
10820 case VOID_TYPE:
10821 /* This signifies an endlink; if no arguments remain, return
10822 true, otherwise return false. */
10823 res = !more_const_call_expr_args_p (&iter);
10824 goto end;
10825 default:
10826 /* If no parameters remain or the parameter's code does not
10827 match the specified code, return false. Otherwise continue
10828 checking any remaining arguments. */
10829 arg = next_const_call_expr_arg (&iter);
10830 if (!validate_arg (arg, code))
10831 goto end;
10832 break;
10835 while (1);
10837 /* We need gotos here since we can only have one va_end in a
10838 function. */
10839 end: ;
10840 va_end (ap);
10842 return res;
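/* Usage example from this file: fold_builtin_fpclassify checks its call
   with validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
   INTEGER_TYPE, INTEGER_TYPE, REAL_TYPE, VOID_TYPE), which accepts
   exactly five integral arguments followed by one real argument and
   nothing more; a trailing 0 instead of VOID_TYPE would have allowed
   any further arguments.  */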
10845 /* Default target-specific builtin expander that does nothing. */
10847 rtx
10848 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10849 rtx target ATTRIBUTE_UNUSED,
10850 rtx subtarget ATTRIBUTE_UNUSED,
10851 enum machine_mode mode ATTRIBUTE_UNUSED,
10852 int ignore ATTRIBUTE_UNUSED)
10854 return NULL_RTX;
10857 /* Returns true if EXP represents data that would potentially reside
10858 in a readonly section. */
10860 static bool
10861 readonly_data_expr (tree exp)
10863 STRIP_NOPS (exp);
10865 if (TREE_CODE (exp) != ADDR_EXPR)
10866 return false;
10868 exp = get_base_address (TREE_OPERAND (exp, 0));
10869 if (!exp)
10870 return false;
10872 /* Make sure we call decl_readonly_section only for trees it
10873 can handle (since it returns true for everything it doesn't
10874 understand). */
10875 if (TREE_CODE (exp) == STRING_CST
10876 || TREE_CODE (exp) == CONSTRUCTOR
10877 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10878 return decl_readonly_section (exp, 0);
10879 else
10880 return false;
10883 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10884 to the call, and TYPE is its return type.
10886 Return NULL_TREE if no simplification was possible, otherwise return the
10887 simplified form of the call as a tree.
10889 The simplified form may be a constant or other expression which
10890 computes the same value, but in a more efficient manner (including
10891 calls to other builtin functions).
10893 The call may contain arguments which need to be evaluated, but
10894 which are not useful to determine the result of the call. In
10895 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10896 COMPOUND_EXPR will be an argument which must be evaluated.
10897 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10898 COMPOUND_EXPR in the chain will contain the tree for the simplified
10899 form of the builtin function call. */
10901 static tree
10902 fold_builtin_strstr (tree s1, tree s2, tree type)
10904 if (!validate_arg (s1, POINTER_TYPE)
10905 || !validate_arg (s2, POINTER_TYPE))
10906 return NULL_TREE;
10907 else
10909 tree fn;
10910 const char *p1, *p2;
10912 p2 = c_getstr (s2);
10913 if (p2 == NULL)
10914 return NULL_TREE;
10916 p1 = c_getstr (s1);
10917 if (p1 != NULL)
10919 const char *r = strstr (p1, p2);
10920 tree tem;
10922 if (r == NULL)
10923 return build_int_cst (TREE_TYPE (s1), 0);
10925 /* Return an offset into the constant string argument. */
10926 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10927 s1, size_int (r - p1));
10928 return fold_convert (type, tem);
10931 /* The argument is const char *, and the result is char *, so we need
10932 a type conversion here to avoid a warning. */
10933 if (p2[0] == '\0')
10934 return fold_convert (type, s1);
10936 if (p2[1] != '\0')
10937 return NULL_TREE;
10939 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
10940 if (!fn)
10941 return NULL_TREE;
10943 /* New argument list transforming strstr(s1, s2) to
10944 strchr(s1, s2[0]). */
10945 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
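/* For illustration: strstr ("hello", "lo") folds to the constant offset
   "hello" + 3, strstr (s, "") folds to s converted to the return type,
   and strstr (s, "c") is rewritten as strchr (s, 'c').  Here s stands
   for an arbitrary pointer argument.  */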
10949 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10950 the call, and TYPE is its return type.
10952 Return NULL_TREE if no simplification was possible, otherwise return the
10953 simplified form of the call as a tree.
10955 The simplified form may be a constant or other expression which
10956 computes the same value, but in a more efficient manner (including
10957 calls to other builtin functions).
10959 The call may contain arguments which need to be evaluated, but
10960 which are not useful to determine the result of the call. In
10961 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10962 COMPOUND_EXPR will be an argument which must be evaluated.
10963 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10964 COMPOUND_EXPR in the chain will contain the tree for the simplified
10965 form of the builtin function call. */
10967 static tree
10968 fold_builtin_strchr (tree s1, tree s2, tree type)
10970 if (!validate_arg (s1, POINTER_TYPE)
10971 || !validate_arg (s2, INTEGER_TYPE))
10972 return NULL_TREE;
10973 else
10975 const char *p1;
10977 if (TREE_CODE (s2) != INTEGER_CST)
10978 return NULL_TREE;
10980 p1 = c_getstr (s1);
10981 if (p1 != NULL)
10983 char c;
10984 const char *r;
10985 tree tem;
10987 if (target_char_cast (s2, &c))
10988 return NULL_TREE;
10990 r = strchr (p1, c);
10992 if (r == NULL)
10993 return build_int_cst (TREE_TYPE (s1), 0);
10995 /* Return an offset into the constant string argument. */
10996 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
10997 s1, size_int (r - p1));
10998 return fold_convert (type, tem);
11000 return NULL_TREE;
11004 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11005 the call, and TYPE is its return type.
11007 Return NULL_TREE if no simplification was possible, otherwise return the
11008 simplified form of the call as a tree.
11010 The simplified form may be a constant or other expression which
11011 computes the same value, but in a more efficient manner (including
11012 calls to other builtin functions).
11014 The call may contain arguments which need to be evaluated, but
11015 which are not useful to determine the result of the call. In
11016 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11017 COMPOUND_EXPR will be an argument which must be evaluated.
11018 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11019 COMPOUND_EXPR in the chain will contain the tree for the simplified
11020 form of the builtin function call. */
11022 static tree
11023 fold_builtin_strrchr (tree s1, tree s2, tree type)
11025 if (!validate_arg (s1, POINTER_TYPE)
11026 || !validate_arg (s2, INTEGER_TYPE))
11027 return NULL_TREE;
11028 else
11030 tree fn;
11031 const char *p1;
11033 if (TREE_CODE (s2) != INTEGER_CST)
11034 return NULL_TREE;
11036 p1 = c_getstr (s1);
11037 if (p1 != NULL)
11039 char c;
11040 const char *r;
11041 tree tem;
11043 if (target_char_cast (s2, &c))
11044 return NULL_TREE;
11046 r = strrchr (p1, c);
11048 if (r == NULL)
11049 return build_int_cst (TREE_TYPE (s1), 0);
11051 /* Return an offset into the constant string argument. */
11052 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11053 s1, size_int (r - p1));
11054 return fold_convert (type, tem);
11057 if (! integer_zerop (s2))
11058 return NULL_TREE;
11060 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11061 if (!fn)
11062 return NULL_TREE;
11064 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11065 return build_call_expr (fn, 2, s1, s2);
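/* A minimal illustrative sketch of the strrchr folding above, assuming
   S is a hypothetical char * with no compile-time-known contents:

     strrchr ("haystack", 'a')  =>  "haystack" + 5
     strrchr (S, '\0')          =>  strchr (S, '\0')
     strrchr (S, 'a')           =>  NULL_TREE; the library call is kept.  */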
11069 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11070 to the call, and TYPE is its return type.
11072 Return NULL_TREE if no simplification was possible, otherwise return the
11073 simplified form of the call as a tree.
11075 The simplified form may be a constant or other expression which
11076 computes the same value, but in a more efficient manner (including
11077 calls to other builtin functions).
11079 The call may contain arguments which need to be evaluated, but
11080 which are not useful to determine the result of the call. In
11081 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11082 COMPOUND_EXPR will be an argument which must be evaluated.
11083 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11084 COMPOUND_EXPR in the chain will contain the tree for the simplified
11085 form of the builtin function call. */
11087 static tree
11088 fold_builtin_strpbrk (tree s1, tree s2, tree type)
11090 if (!validate_arg (s1, POINTER_TYPE)
11091 || !validate_arg (s2, POINTER_TYPE))
11092 return NULL_TREE;
11093 else
11095 tree fn;
11096 const char *p1, *p2;
11098 p2 = c_getstr (s2);
11099 if (p2 == NULL)
11100 return NULL_TREE;
11102 p1 = c_getstr (s1);
11103 if (p1 != NULL)
11105 const char *r = strpbrk (p1, p2);
11106 tree tem;
11108 if (r == NULL)
11109 return build_int_cst (TREE_TYPE (s1), 0);
11111 /* Return an offset into the constant string argument. */
11112 tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
11113 s1, size_int (r - p1));
11114 return fold_convert (type, tem);
11117 if (p2[0] == '\0')
11118 /* strpbrk(x, "") == NULL.
11119 Evaluate and ignore s1 in case it had side-effects. */
11120 return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);
11122 if (p2[1] != '\0')
11123 return NULL_TREE; /* Really call strpbrk. */
11125 fn = implicit_built_in_decls[BUILT_IN_STRCHR];
11126 if (!fn)
11127 return NULL_TREE;
11129 /* New argument list transforming strpbrk(s1, s2) to
11130 strchr(s1, s2[0]). */
11131 return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
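/* A minimal illustrative sketch of the strpbrk folding above, assuming
   S is a hypothetical char * with no compile-time-known contents:

     strpbrk ("haystack", "ck")  =>  "haystack" + 6
     strpbrk (S, "")             =>  a null pointer constant; S is still
                                     evaluated for its side effects
     strpbrk (S, "k")            =>  strchr (S, 'k')
     strpbrk (S, "ck")           =>  NULL_TREE; the library call is kept.  */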
11135 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11136 to the call.
11138 Return NULL_TREE if no simplification was possible, otherwise return the
11139 simplified form of the call as a tree.
11141 The simplified form may be a constant or other expression which
11142 computes the same value, but in a more efficient manner (including
11143 calls to other builtin functions).
11145 The call may contain arguments which need to be evaluated, but
11146 which are not useful to determine the result of the call. In
11147 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11148 COMPOUND_EXPR will be an argument which must be evaluated.
11149 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11150 COMPOUND_EXPR in the chain will contain the tree for the simplified
11151 form of the builtin function call. */
11153 static tree
11154 fold_builtin_strcat (tree dst, tree src)
11156 if (!validate_arg (dst, POINTER_TYPE)
11157 || !validate_arg (src, POINTER_TYPE))
11158 return NULL_TREE;
11159 else
11161 const char *p = c_getstr (src);
11163 /* If the string length is zero, return the dst parameter. */
11164 if (p && *p == '\0')
11165 return dst;
11167 return NULL_TREE;
11171 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11172 arguments to the call.
11174 Return NULL_TREE if no simplification was possible, otherwise return the
11175 simplified form of the call as a tree.
11177 The simplified form may be a constant or other expression which
11178 computes the same value, but in a more efficient manner (including
11179 calls to other builtin functions).
11181 The call may contain arguments which need to be evaluated, but
11182 which are not useful to determine the result of the call. In
11183 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11184 COMPOUND_EXPR will be an argument which must be evaluated.
11185 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11186 COMPOUND_EXPR in the chain will contain the tree for the simplified
11187 form of the builtin function call. */
11189 static tree
11190 fold_builtin_strncat (tree dst, tree src, tree len)
11192 if (!validate_arg (dst, POINTER_TYPE)
11193 || !validate_arg (src, POINTER_TYPE)
11194 || !validate_arg (len, INTEGER_TYPE))
11195 return NULL_TREE;
11196 else
11198 const char *p = c_getstr (src);
11200 /* If the requested length is zero, or the src parameter string
11201 length is zero, return the dst parameter. */
11202 if (integer_zerop (len) || (p && *p == '\0'))
11203 return omit_two_operands (TREE_TYPE (dst), dst, src, len);
11205 /* If the requested len is greater than or equal to the string
11206 length, call strcat. */
11207 if (TREE_CODE (len) == INTEGER_CST && p
11208 && compare_tree_int (len, strlen (p)) >= 0)
11210 tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];
11212 /* If the replacement _DECL isn't initialized, don't do the
11213 transformation. */
11214 if (!fn)
11215 return NULL_TREE;
11217 return build_call_expr (fn, 2, dst, src);
11219 return NULL_TREE;
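/* A minimal illustrative sketch of the strcat/strncat folding above,
   assuming D and S are hypothetical char * values with unknown contents
   and N is a non-constant length:

     strcat (D, "")         =>  D
     strncat (D, S, 0)      =>  D, with S and the length still evaluated
     strncat (D, "abc", 8)  =>  strcat (D, "abc")    [8 >= strlen ("abc")]
     strncat (D, "abc", 2)  =>  NULL_TREE; the library call is kept
     strncat (D, S, N)      =>  NULL_TREE; the library call is kept.  */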
11223 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11224 to the call.
11226 Return NULL_TREE if no simplification was possible, otherwise return the
11227 simplified form of the call as a tree.
11229 The simplified form may be a constant or other expression which
11230 computes the same value, but in a more efficient manner (including
11231 calls to other builtin functions).
11233 The call may contain arguments which need to be evaluated, but
11234 which are not useful to determine the result of the call. In
11235 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11236 COMPOUND_EXPR will be an argument which must be evaluated.
11237 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11238 COMPOUND_EXPR in the chain will contain the tree for the simplified
11239 form of the builtin function call. */
11241 static tree
11242 fold_builtin_strspn (tree s1, tree s2)
11244 if (!validate_arg (s1, POINTER_TYPE)
11245 || !validate_arg (s2, POINTER_TYPE))
11246 return NULL_TREE;
11247 else
11249 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11251 /* If both arguments are constants, evaluate at compile-time. */
11252 if (p1 && p2)
11254 const size_t r = strspn (p1, p2);
11255 return size_int (r);
11259 /* If either argument is "", the result is 0. */
11259 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11260 /* Evaluate and ignore both arguments in case either one has
11261 side-effects. */
11262 return omit_two_operands (integer_type_node, integer_zero_node,
11263 s1, s2);
11264 return NULL_TREE;
11268 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11269 to the call.
11271 Return NULL_TREE if no simplification was possible, otherwise return the
11272 simplified form of the call as a tree.
11274 The simplified form may be a constant or other expression which
11275 computes the same value, but in a more efficient manner (including
11276 calls to other builtin functions).
11278 The call may contain arguments which need to be evaluated, but
11279 which are not useful to determine the result of the call. In
11280 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11281 COMPOUND_EXPR will be an argument which must be evaluated.
11282 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11283 COMPOUND_EXPR in the chain will contain the tree for the simplified
11284 form of the builtin function call. */
11286 static tree
11287 fold_builtin_strcspn (tree s1, tree s2)
11289 if (!validate_arg (s1, POINTER_TYPE)
11290 || !validate_arg (s2, POINTER_TYPE))
11291 return NULL_TREE;
11292 else
11294 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11296 /* If both arguments are constants, evaluate at compile-time. */
11297 if (p1 && p2)
11299 const size_t r = strcspn (p1, p2);
11300 return size_int (r);
11304 /* If the first argument is "", the result is 0. */
11304 if (p1 && *p1 == '\0')
11306 /* Evaluate and ignore argument s2 in case it has
11307 side-effects. */
11308 return omit_one_operand (integer_type_node,
11309 integer_zero_node, s2);
11312 /* If the second argument is "", return __builtin_strlen(s1). */
11313 if (p2 && *p2 == '\0')
11315 tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];
11317 /* If the replacement _DECL isn't initialized, don't do the
11318 transformation. */
11319 if (!fn)
11320 return NULL_TREE;
11322 return build_call_expr (fn, 1, s1);
11324 return NULL_TREE;
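/* A minimal illustrative sketch of the strspn/strcspn folding above,
   assuming S is a hypothetical char * with no compile-time-known
   contents:

     strspn ("abcde", "bca")  =>  3
     strspn (S, "")           =>  0, with S still evaluated
     strcspn ("abcde", "dx")  =>  3
     strcspn ("", S)          =>  0, with S still evaluated
     strcspn (S, "")          =>  __builtin_strlen (S)
     strcspn (S, "abc")       =>  NULL_TREE; the library call is kept.  */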
11328 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11329 to the call. IGNORE is true if the value returned
11330 by the builtin will be ignored. UNLOCKED is true if this is
11331 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11332 the known length of the string. Return NULL_TREE if no simplification
11333 was possible. */
11335 tree
11336 fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
11338 /* If we're using an unlocked function, assume the other unlocked
11339 functions exist explicitly. */
11340 tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
11341 : implicit_built_in_decls[BUILT_IN_FPUTC];
11342 tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
11343 : implicit_built_in_decls[BUILT_IN_FWRITE];
11345 /* If the return value is used, don't do the transformation. */
11346 if (!ignore)
11347 return NULL_TREE;
11349 /* Verify the arguments in the original call. */
11350 if (!validate_arg (arg0, POINTER_TYPE)
11351 || !validate_arg (arg1, POINTER_TYPE))
11352 return NULL_TREE;
11354 if (! len)
11355 len = c_strlen (arg0, 0);
11357 /* Get the length of the string passed to fputs. If the length
11358 can't be determined, punt. */
11359 if (!len
11360 || TREE_CODE (len) != INTEGER_CST)
11361 return NULL_TREE;
11363 switch (compare_tree_int (len, 1))
11365 case -1: /* length is 0, delete the call entirely. */
11366 return omit_one_operand (integer_type_node, integer_zero_node, arg1);
11368 case 0: /* length is 1, call fputc. */
11370 const char *p = c_getstr (arg0);
11372 if (p != NULL)
11374 if (fn_fputc)
11375 return build_call_expr (fn_fputc, 2,
11376 build_int_cst (NULL_TREE, p[0]), arg1);
11377 else
11378 return NULL_TREE;
11381 /* FALLTHROUGH */
11382 case 1: /* length is greater than 1, call fwrite. */
11384 /* If optimizing for size keep fputs. */
11385 if (optimize_size)
11386 return NULL_TREE;
11387 /* New argument list transforming fputs(string, stream) to
11388 fwrite(string, 1, len, stream). */
11389 if (fn_fwrite)
11390 return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);
11391 else
11392 return NULL_TREE;
11394 default:
11395 gcc_unreachable ();
11397 return NULL_TREE;
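/* A minimal illustrative sketch of the fputs folding above; it only
   applies when the call's value is unused, and F stands for a
   hypothetical FILE *:

     (void) fputs ("", F)     =>  call deleted; F is still evaluated
     (void) fputs ("x", F)    =>  fputc ('x', F)
     (void) fputs ("abc", F)  =>  fwrite ("abc", 1, 3, F), but kept as
                                  fputs when optimizing for size
     n = fputs ("abc", F)     =>  NULL_TREE; the library call is kept.  */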
11400 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11401 produced. False otherwise. This is done so that we don't output the error
11402 or warning twice or three times. */
11403 bool
11404 fold_builtin_next_arg (tree exp, bool va_start_p)
11406 tree fntype = TREE_TYPE (current_function_decl);
11407 int nargs = call_expr_nargs (exp);
11408 tree arg;
11410 if (TYPE_ARG_TYPES (fntype) == 0
11411 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
11412 == void_type_node))
11414 error ("%<va_start%> used in function with fixed args");
11415 return true;
11418 if (va_start_p)
11420 if (va_start_p && (nargs != 2))
11422 error ("wrong number of arguments to function %<va_start%>");
11423 return true;
11425 arg = CALL_EXPR_ARG (exp, 1);
11427 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11428 when we checked the arguments and if needed issued a warning. */
11429 else
11431 if (nargs == 0)
11433 /* Evidently an out of date version of <stdarg.h>; can't validate
11434 va_start's second argument, but can still work as intended. */
11435 warning (0, "%<__builtin_next_arg%> called without an argument");
11436 return true;
11438 else if (nargs > 1)
11440 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11441 return true;
11443 arg = CALL_EXPR_ARG (exp, 0);
11446 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11447 or __builtin_next_arg (0) the first time we see it, after checking
11448 the arguments and if needed issuing a warning. */
11449 if (!integer_zerop (arg))
11451 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
11453 /* Strip off all nops for the sake of the comparison. This
11454 is not quite the same as STRIP_NOPS. It does more.
11455 We must also strip off INDIRECT_EXPR for C++ reference
11456 parameters. */
11457 while (CONVERT_EXPR_P (arg)
11458 || TREE_CODE (arg) == INDIRECT_REF)
11459 arg = TREE_OPERAND (arg, 0);
11460 if (arg != last_parm)
11462 /* FIXME: Sometimes with the tree optimizers we can get something
11463 other than the last argument even though the user used the last
11464 argument. We just warn and set the arg to be the last
11465 argument so that we will not get wrong code because of
11466 it. */
11467 warning (0, "second parameter of %<va_start%> not last named argument");
11469 /* We want to verify the second parameter just once before the tree
11470 optimizers are run and then avoid keeping it in the tree,
11471 as otherwise we could warn even for correct code like:
11472 void foo (int i, ...)
11473 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11474 if (va_start_p)
11475 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11476 else
11477 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
11479 return false;
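/* A minimal illustrative sketch of what the checks above accept and
   reject (function names are hypothetical):

     void ok (int last, ...)
     {
       va_list ap;
       va_start (ap, last);   accepted; the second argument is then
                              rewritten to 0 so later passes do not
                              check or warn again
       va_end (ap);
     }

     void dubious (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);      warning: second parameter of va_start
                              not last named argument
       va_end (ap);
     }

   Calling va_start in a function with a fixed argument list, or with
   the wrong number of arguments, is reported as an error instead.  */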
11483 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11484 ORIG may be null if this is a 2-argument call. We don't attempt to
11485 simplify calls with more than 3 arguments.
11487 Return NULL_TREE if no simplification was possible, otherwise return the
11488 simplified form of the call as a tree. If IGNORED is true, it means that
11489 the caller does not use the returned value of the function. */
11491 static tree
11492 fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)
11494 tree call, retval;
11495 const char *fmt_str = NULL;
11497 /* Verify the required arguments in the original call. We deal with two
11498 types of sprintf() calls: 'sprintf (str, fmt)' and
11499 'sprintf (dest, "%s", orig)'. */
11500 if (!validate_arg (dest, POINTER_TYPE)
11501 || !validate_arg (fmt, POINTER_TYPE))
11502 return NULL_TREE;
11503 if (orig && !validate_arg (orig, POINTER_TYPE))
11504 return NULL_TREE;
11506 /* Check whether the format is a literal string constant. */
11507 fmt_str = c_getstr (fmt);
11508 if (fmt_str == NULL)
11509 return NULL_TREE;
11511 call = NULL_TREE;
11512 retval = NULL_TREE;
11514 if (!init_target_chars ())
11515 return NULL_TREE;
11517 /* If the format doesn't contain % args or %%, use strcpy. */
11518 if (strchr (fmt_str, target_percent) == NULL)
11520 tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11522 if (!fn)
11523 return NULL_TREE;
11525 /* Don't optimize sprintf (buf, "abc", ptr++). */
11526 if (orig)
11527 return NULL_TREE;
11529 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11530 'format' is known to contain no % formats. */
11531 call = build_call_expr (fn, 2, dest, fmt);
11532 if (!ignored)
11533 retval = build_int_cst (NULL_TREE, strlen (fmt_str));
11536 /* If the format is "%s", use strcpy if the result isn't used. */
11537 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
11539 tree fn;
11540 fn = implicit_built_in_decls[BUILT_IN_STRCPY];
11542 if (!fn)
11543 return NULL_TREE;
11545 /* Don't crash on sprintf (str1, "%s"). */
11546 if (!orig)
11547 return NULL_TREE;
11549 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11550 if (!ignored)
11552 retval = c_strlen (orig, 1);
11553 if (!retval || TREE_CODE (retval) != INTEGER_CST)
11554 return NULL_TREE;
11556 call = build_call_expr (fn, 2, dest, orig);
11559 if (call && retval)
11561 retval = fold_convert
11562 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
11563 retval);
11564 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
11566 else
11567 return call;
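/* A minimal illustrative sketch of the sprintf folding above, assuming
   D and S are hypothetical char * values with unknown contents:

     sprintf (D, "abc")    =>  strcpy (D, "abc"), with value 3 if the
                               result is used
     sprintf (D, "%s", S)  =>  strcpy (D, S), but only when the result
                               is unused or strlen (S) is a compile-time
                               constant
     sprintf (D, "%d", i)  =>  NULL_TREE; the library call is kept.  */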
11570 /* Expand a call EXP to __builtin_object_size. */
11572 static rtx
11573 expand_builtin_object_size (tree exp)
11575 tree ost;
11576 int object_size_type;
11577 tree fndecl = get_callee_fndecl (exp);
11579 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
11581 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11582 exp, fndecl);
11583 expand_builtin_trap ();
11584 return const0_rtx;
11587 ost = CALL_EXPR_ARG (exp, 1);
11588 STRIP_NOPS (ost);
11590 if (TREE_CODE (ost) != INTEGER_CST
11591 || tree_int_cst_sgn (ost) < 0
11592 || compare_tree_int (ost, 3) > 0)
11594 error ("%Klast argument of %D is not integer constant between 0 and 3",
11595 exp, fndecl);
11596 expand_builtin_trap ();
11597 return const0_rtx;
11600 object_size_type = tree_low_cst (ost, 0);
11602 return object_size_type < 2 ? constm1_rtx : const0_rtx;
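/* A minimal illustrative sketch of the fallback above: by the time
   expansion is reached, any __builtin_object_size call that earlier
   passes could not resolve degrades to its "unknown" value, e.g.

     __builtin_object_size (p, 0)  =>  (size_t) -1
     __builtin_object_size (p, 2)  =>  (size_t) 0

   while a non-constant or out-of-range last argument is diagnosed and
   replaced by a trap.  */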
11605 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11606 FCODE is the BUILT_IN_* to use.
11607 Return NULL_RTX if we failed; the caller should emit a normal call,
11608 otherwise try to get the result in TARGET, if convenient (and in
11609 mode MODE if that's convenient). */
11611 static rtx
11612 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
11613 enum built_in_function fcode)
11615 tree dest, src, len, size;
11617 if (!validate_arglist (exp,
11618 POINTER_TYPE,
11619 fcode == BUILT_IN_MEMSET_CHK
11620 ? INTEGER_TYPE : POINTER_TYPE,
11621 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
11622 return NULL_RTX;
11624 dest = CALL_EXPR_ARG (exp, 0);
11625 src = CALL_EXPR_ARG (exp, 1);
11626 len = CALL_EXPR_ARG (exp, 2);
11627 size = CALL_EXPR_ARG (exp, 3);
11629 if (! host_integerp (size, 1))
11630 return NULL_RTX;
11632 if (host_integerp (len, 1) || integer_all_onesp (size))
11634 tree fn;
11636 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11638 warning (0, "%Kcall to %D will always overflow destination buffer",
11639 exp, get_callee_fndecl (exp));
11640 return NULL_RTX;
11643 fn = NULL_TREE;
11644 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11645 mem{cpy,pcpy,move,set} is available. */
11646 switch (fcode)
11648 case BUILT_IN_MEMCPY_CHK:
11649 fn = built_in_decls[BUILT_IN_MEMCPY];
11650 break;
11651 case BUILT_IN_MEMPCPY_CHK:
11652 fn = built_in_decls[BUILT_IN_MEMPCPY];
11653 break;
11654 case BUILT_IN_MEMMOVE_CHK:
11655 fn = built_in_decls[BUILT_IN_MEMMOVE];
11656 break;
11657 case BUILT_IN_MEMSET_CHK:
11658 fn = built_in_decls[BUILT_IN_MEMSET];
11659 break;
11660 default:
11661 break;
11664 if (! fn)
11665 return NULL_RTX;
11667 fn = build_call_expr (fn, 3, dest, src, len);
11668 STRIP_TYPE_NOPS (fn);
11669 while (TREE_CODE (fn) == COMPOUND_EXPR)
11671 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11672 EXPAND_NORMAL);
11673 fn = TREE_OPERAND (fn, 1);
11675 if (TREE_CODE (fn) == CALL_EXPR)
11676 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11677 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11679 else if (fcode == BUILT_IN_MEMSET_CHK)
11680 return NULL_RTX;
11681 else
11683 unsigned int dest_align
11684 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
11686 /* If DEST is not a pointer type, call the normal function. */
11687 if (dest_align == 0)
11688 return NULL_RTX;
11690 /* If SRC and DEST are the same (and not volatile), do nothing. */
11691 if (operand_equal_p (src, dest, 0))
11693 tree expr;
11695 if (fcode != BUILT_IN_MEMPCPY_CHK)
11697 /* Evaluate and ignore LEN in case it has side-effects. */
11698 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11699 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11702 expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11703 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11706 /* __memmove_chk special case. */
11707 if (fcode == BUILT_IN_MEMMOVE_CHK)
11709 unsigned int src_align
11710 = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
11712 if (src_align == 0)
11713 return NULL_RTX;
11715 /* If src is categorized for a readonly section we can use
11716 normal __memcpy_chk. */
11717 if (readonly_data_expr (src))
11719 tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11720 if (!fn)
11721 return NULL_RTX;
11722 fn = build_call_expr (fn, 4, dest, src, len, size);
11723 STRIP_TYPE_NOPS (fn);
11724 while (TREE_CODE (fn) == COMPOUND_EXPR)
11726 expand_expr (TREE_OPERAND (fn, 0), const0_rtx, VOIDmode,
11727 EXPAND_NORMAL);
11728 fn = TREE_OPERAND (fn, 1);
11730 if (TREE_CODE (fn) == CALL_EXPR)
11731 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11732 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11735 return NULL_RTX;
11739 /* Emit warning if a buffer overflow is detected at compile time. */
11741 static void
11742 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11744 int is_strlen = 0;
11745 tree len, size;
11747 switch (fcode)
11749 case BUILT_IN_STRCPY_CHK:
11750 case BUILT_IN_STPCPY_CHK:
11751 /* For __strcat_chk the warning will be emitted only if overflowing
11752 by at least strlen (dest) + 1 bytes. */
11753 case BUILT_IN_STRCAT_CHK:
11754 len = CALL_EXPR_ARG (exp, 1);
11755 size = CALL_EXPR_ARG (exp, 2);
11756 is_strlen = 1;
11757 break;
11758 case BUILT_IN_STRNCAT_CHK:
11759 case BUILT_IN_STRNCPY_CHK:
11760 len = CALL_EXPR_ARG (exp, 2);
11761 size = CALL_EXPR_ARG (exp, 3);
11762 break;
11763 case BUILT_IN_SNPRINTF_CHK:
11764 case BUILT_IN_VSNPRINTF_CHK:
11765 len = CALL_EXPR_ARG (exp, 1);
11766 size = CALL_EXPR_ARG (exp, 3);
11767 break;
11768 default:
11769 gcc_unreachable ();
11772 if (!len || !size)
11773 return;
11775 if (! host_integerp (size, 1) || integer_all_onesp (size))
11776 return;
11778 if (is_strlen)
11780 len = c_strlen (len, 1);
11781 if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11782 return;
11784 else if (fcode == BUILT_IN_STRNCAT_CHK)
11786 tree src = CALL_EXPR_ARG (exp, 1);
11787 if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))
11788 return;
11789 src = c_strlen (src, 1);
11790 if (! src || ! host_integerp (src, 1))
11792 warning (0, "%Kcall to %D might overflow destination buffer",
11793 exp, get_callee_fndecl (exp));
11794 return;
11796 else if (tree_int_cst_lt (src, size))
11797 return;
11799 else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))
11800 return;
11802 warning (0, "%Kcall to %D will always overflow destination buffer",
11803 exp, get_callee_fndecl (exp));
11806 /* Emit warning if a buffer overflow is detected at compile time
11807 in __sprintf_chk/__vsprintf_chk calls. */
11809 static void
11810 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11812 tree dest, size, len, fmt, flag;
11813 const char *fmt_str;
11814 int nargs = call_expr_nargs (exp);
11816 /* Verify the required arguments in the original call. */
11818 if (nargs < 4)
11819 return;
11820 dest = CALL_EXPR_ARG (exp, 0);
11821 flag = CALL_EXPR_ARG (exp, 1);
11822 size = CALL_EXPR_ARG (exp, 2);
11823 fmt = CALL_EXPR_ARG (exp, 3);
11825 if (! host_integerp (size, 1) || integer_all_onesp (size))
11826 return;
11828 /* Check whether the format is a literal string constant. */
11829 fmt_str = c_getstr (fmt);
11830 if (fmt_str == NULL)
11831 return;
11833 if (!init_target_chars ())
11834 return;
11836 /* If the format doesn't contain % args or %%, we know its size. */
11837 if (strchr (fmt_str, target_percent) == 0)
11838 len = build_int_cstu (size_type_node, strlen (fmt_str));
11839 /* If the format is "%s" and first ... argument is a string literal,
11840 we know it too. */
11841 else if (fcode == BUILT_IN_SPRINTF_CHK
11842 && strcmp (fmt_str, target_percent_s) == 0)
11844 tree arg;
11846 if (nargs < 5)
11847 return;
11848 arg = CALL_EXPR_ARG (exp, 4);
11849 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11850 return;
11852 len = c_strlen (arg, 1);
11853 if (!len || ! host_integerp (len, 1))
11854 return;
11856 else
11857 return;
11859 if (! tree_int_cst_lt (len, size))
11861 warning (0, "%Kcall to %D will always overflow destination buffer",
11862 exp, get_callee_fndecl (exp));
11866 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11867 if possible. */
11869 tree
11870 fold_builtin_object_size (tree ptr, tree ost)
11872 tree ret = NULL_TREE;
11873 int object_size_type;
11875 if (!validate_arg (ptr, POINTER_TYPE)
11876 || !validate_arg (ost, INTEGER_TYPE))
11877 return NULL_TREE;
11879 STRIP_NOPS (ost);
11881 if (TREE_CODE (ost) != INTEGER_CST
11882 || tree_int_cst_sgn (ost) < 0
11883 || compare_tree_int (ost, 3) > 0)
11884 return NULL_TREE;
11886 object_size_type = tree_low_cst (ost, 0);
11888 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11889 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11890 and (size_t) 0 for types 2 and 3. */
11891 if (TREE_SIDE_EFFECTS (ptr))
11892 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11894 if (TREE_CODE (ptr) == ADDR_EXPR)
11895 ret = build_int_cstu (size_type_node,
11896 compute_builtin_object_size (ptr, object_size_type));
11898 else if (TREE_CODE (ptr) == SSA_NAME)
11900 unsigned HOST_WIDE_INT bytes;
11902 /* If object size is not known yet, delay folding until
11903 later. Maybe subsequent passes will help determine
11904 it. */
11905 bytes = compute_builtin_object_size (ptr, object_size_type);
11906 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
11907 ? -1 : 0))
11908 ret = build_int_cstu (size_type_node, bytes);
11911 if (ret)
11913 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
11914 HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
11915 if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
11916 ret = NULL_TREE;
11919 return ret;
11922 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11923 DEST, SRC, LEN, and SIZE are the arguments to the call.
11924 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11925 code of the builtin. If MAXLEN is not NULL, it is maximum length
11926 passed as third argument. */
11928 tree
11929 fold_builtin_memory_chk (tree fndecl,
11930 tree dest, tree src, tree len, tree size,
11931 tree maxlen, bool ignore,
11932 enum built_in_function fcode)
11934 tree fn;
11936 if (!validate_arg (dest, POINTER_TYPE)
11937 || !validate_arg (src,
11938 (fcode == BUILT_IN_MEMSET_CHK
11939 ? INTEGER_TYPE : POINTER_TYPE))
11940 || !validate_arg (len, INTEGER_TYPE)
11941 || !validate_arg (size, INTEGER_TYPE))
11942 return NULL_TREE;
11944 /* If SRC and DEST are the same (and not volatile), return DEST
11945 (resp. DEST+LEN for __mempcpy_chk). */
11946 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
11948 if (fcode != BUILT_IN_MEMPCPY_CHK)
11949 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
11950 else
11952 tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
11953 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
11957 if (! host_integerp (size, 1))
11958 return NULL_TREE;
11960 if (! integer_all_onesp (size))
11962 if (! host_integerp (len, 1))
11964 /* If LEN is not constant, try MAXLEN too.
11965 For MAXLEN only allow optimizing into non-_ocs function
11966 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11967 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
11969 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
11971 /* (void) __mempcpy_chk () can be optimized into
11972 (void) __memcpy_chk (). */
11973 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
11974 if (!fn)
11975 return NULL_TREE;
11977 return build_call_expr (fn, 4, dest, src, len, size);
11979 return NULL_TREE;
11982 else
11983 maxlen = len;
11985 if (tree_int_cst_lt (size, maxlen))
11986 return NULL_TREE;
11989 fn = NULL_TREE;
11990 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11991 mem{cpy,pcpy,move,set} is available. */
11992 switch (fcode)
11994 case BUILT_IN_MEMCPY_CHK:
11995 fn = built_in_decls[BUILT_IN_MEMCPY];
11996 break;
11997 case BUILT_IN_MEMPCPY_CHK:
11998 fn = built_in_decls[BUILT_IN_MEMPCPY];
11999 break;
12000 case BUILT_IN_MEMMOVE_CHK:
12001 fn = built_in_decls[BUILT_IN_MEMMOVE];
12002 break;
12003 case BUILT_IN_MEMSET_CHK:
12004 fn = built_in_decls[BUILT_IN_MEMSET];
12005 break;
12006 default:
12007 break;
12010 if (!fn)
12011 return NULL_TREE;
12013 return build_call_expr (fn, 3, dest, src, len);
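/* A minimal illustrative sketch of the __mem*_chk folding above,
   assuming D and S are hypothetical pointers and N is a non-constant
   length:

     __memcpy_chk (D, S, 16, 32)          =>  memcpy (D, S, 16)
     __memcpy_chk (D, S, N, (size_t) -1)  =>  memcpy (D, S, N)
     __memcpy_chk (D, S, 64, 32)          =>  NULL_TREE; the checked call
                                              is kept and the expander
                                              warns that it will always
                                              overflow
     (void) __mempcpy_chk (D, S, N, 32)   =>  __memcpy_chk (D, S, N, 32).  */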
12016 /* Fold a call to the __st[rp]cpy_chk builtin.
12017 DEST, SRC, and SIZE are the arguments to the call.
12018 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12019 code of the builtin. If MAXLEN is not NULL, it is maximum length of
12020 strings passed as second argument. */
12022 tree
12023 fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
12024 tree maxlen, bool ignore,
12025 enum built_in_function fcode)
12027 tree len, fn;
12029 if (!validate_arg (dest, POINTER_TYPE)
12030 || !validate_arg (src, POINTER_TYPE)
12031 || !validate_arg (size, INTEGER_TYPE))
12032 return NULL_TREE;
12034 /* If SRC and DEST are the same (and not volatile), return DEST. */
12035 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12036 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);
12038 if (! host_integerp (size, 1))
12039 return NULL_TREE;
12041 if (! integer_all_onesp (size))
12043 len = c_strlen (src, 1);
12044 if (! len || ! host_integerp (len, 1))
12046 /* If LEN is not constant, try MAXLEN too.
12047 For MAXLEN only allow optimizing into non-_ocs function
12048 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12049 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12051 if (fcode == BUILT_IN_STPCPY_CHK)
12053 if (! ignore)
12054 return NULL_TREE;
12056 /* If return value of __stpcpy_chk is ignored,
12057 optimize into __strcpy_chk. */
12058 fn = built_in_decls[BUILT_IN_STRCPY_CHK];
12059 if (!fn)
12060 return NULL_TREE;
12062 return build_call_expr (fn, 3, dest, src, size);
12065 if (! len || TREE_SIDE_EFFECTS (len))
12066 return NULL_TREE;
12068 /* If c_strlen returned something, but not a constant,
12069 transform __strcpy_chk into __memcpy_chk. */
12070 fn = built_in_decls[BUILT_IN_MEMCPY_CHK];
12071 if (!fn)
12072 return NULL_TREE;
12074 len = size_binop (PLUS_EXPR, len, ssize_int (1));
12075 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
12076 build_call_expr (fn, 4,
12077 dest, src, len, size));
12080 else
12081 maxlen = len;
12083 if (! tree_int_cst_lt (maxlen, size))
12084 return NULL_TREE;
12087 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12088 fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
12089 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];
12090 if (!fn)
12091 return NULL_TREE;
12093 return build_call_expr (fn, 2, dest, src);
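/* A minimal illustrative sketch of the __strcpy_chk/__stpcpy_chk
   folding above, assuming D and S are hypothetical char * values with
   unknown contents:

     __strcpy_chk (D, "abc", 16)       =>  strcpy (D, "abc")   [3 < 16]
     __strcpy_chk (D, S, (size_t) -1)  =>  strcpy (D, S)
     __strcpy_chk (D, "abcdef", 4)     =>  NULL_TREE; the checked call
                                           is kept
     (void) __stpcpy_chk (D, S, 16)    =>  __strcpy_chk (D, S, 16).  */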
12096 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12097 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12098 length passed as third argument. */
12100 tree
12101 fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
12102 tree maxlen)
12104 tree fn;
12106 if (!validate_arg (dest, POINTER_TYPE)
12107 || !validate_arg (src, POINTER_TYPE)
12108 || !validate_arg (len, INTEGER_TYPE)
12109 || !validate_arg (size, INTEGER_TYPE))
12110 return NULL_TREE;
12112 if (! host_integerp (size, 1))
12113 return NULL_TREE;
12115 if (! integer_all_onesp (size))
12117 if (! host_integerp (len, 1))
12119 /* If LEN is not constant, try MAXLEN too.
12120 For MAXLEN only allow optimizing into non-_ocs function
12121 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12122 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12123 return NULL_TREE;
12125 else
12126 maxlen = len;
12128 if (tree_int_cst_lt (size, maxlen))
12129 return NULL_TREE;
12132 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12133 fn = built_in_decls[BUILT_IN_STRNCPY];
12134 if (!fn)
12135 return NULL_TREE;
12137 return build_call_expr (fn, 3, dest, src, len);
12140 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12141 are the arguments to the call. */
12143 static tree
12144 fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)
12146 tree fn;
12147 const char *p;
12149 if (!validate_arg (dest, POINTER_TYPE)
12150 || !validate_arg (src, POINTER_TYPE)
12151 || !validate_arg (size, INTEGER_TYPE))
12152 return NULL_TREE;
12154 p = c_getstr (src);
12155 /* If the SRC parameter is "", return DEST. */
12156 if (p && *p == '\0')
12157 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12159 if (! host_integerp (size, 1) || ! integer_all_onesp (size))
12160 return NULL_TREE;
12162 /* If __builtin_strcat_chk is used, assume strcat is available. */
12163 fn = built_in_decls[BUILT_IN_STRCAT];
12164 if (!fn)
12165 return NULL_TREE;
12167 return build_call_expr (fn, 2, dest, src);
12170 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12171 LEN, and SIZE. */
12173 static tree
12174 fold_builtin_strncat_chk (tree fndecl,
12175 tree dest, tree src, tree len, tree size)
12177 tree fn;
12178 const char *p;
12180 if (!validate_arg (dest, POINTER_TYPE)
12181 || !validate_arg (src, POINTER_TYPE)
12182 || !validate_arg (len, INTEGER_TYPE)
12183 || !validate_arg (size, INTEGER_TYPE))
12184 return NULL_TREE;
12186 p = c_getstr (src);
12187 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12188 if (p && *p == '\0')
12189 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
12190 else if (integer_zerop (len))
12191 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12193 if (! host_integerp (size, 1))
12194 return NULL_TREE;
12196 if (! integer_all_onesp (size))
12198 tree src_len = c_strlen (src, 1);
12199 if (src_len
12200 && host_integerp (src_len, 1)
12201 && host_integerp (len, 1)
12202 && ! tree_int_cst_lt (len, src_len))
12204 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12205 fn = built_in_decls[BUILT_IN_STRCAT_CHK];
12206 if (!fn)
12207 return NULL_TREE;
12209 return build_call_expr (fn, 3, dest, src, size);
12211 return NULL_TREE;
12214 /* If __builtin_strncat_chk is used, assume strncat is available. */
12215 fn = built_in_decls[BUILT_IN_STRNCAT];
12216 if (!fn)
12217 return NULL_TREE;
12219 return build_call_expr (fn, 3, dest, src, len);
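/* A minimal illustrative sketch of the __strcat_chk/__strncat_chk
   folding above, assuming D and S are hypothetical char * values with
   unknown contents and N is a non-constant length:

     __strcat_chk (D, "", 16)          =>  D
     __strcat_chk (D, S, (size_t) -1)  =>  strcat (D, S)
     __strncat_chk (D, "abc", 8, 16)   =>  __strcat_chk (D, "abc", 16)
     __strncat_chk (D, S, N, 16)       =>  NULL_TREE; the checked call
                                           is kept.  */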
12222 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12223 a normal call should be emitted rather than expanding the function
12224 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12226 static tree
12227 fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)
12229 tree dest, size, len, fn, fmt, flag;
12230 const char *fmt_str;
12231 int nargs = call_expr_nargs (exp);
12233 /* Verify the required arguments in the original call. */
12234 if (nargs < 4)
12235 return NULL_TREE;
12236 dest = CALL_EXPR_ARG (exp, 0);
12237 if (!validate_arg (dest, POINTER_TYPE))
12238 return NULL_TREE;
12239 flag = CALL_EXPR_ARG (exp, 1);
12240 if (!validate_arg (flag, INTEGER_TYPE))
12241 return NULL_TREE;
12242 size = CALL_EXPR_ARG (exp, 2);
12243 if (!validate_arg (size, INTEGER_TYPE))
12244 return NULL_TREE;
12245 fmt = CALL_EXPR_ARG (exp, 3);
12246 if (!validate_arg (fmt, POINTER_TYPE))
12247 return NULL_TREE;
12249 if (! host_integerp (size, 1))
12250 return NULL_TREE;
12252 len = NULL_TREE;
12254 if (!init_target_chars ())
12255 return NULL_TREE;
12257 /* Check whether the format is a literal string constant. */
12258 fmt_str = c_getstr (fmt);
12259 if (fmt_str != NULL)
12261 /* If the format doesn't contain % args or %%, we know the size. */
12262 if (strchr (fmt_str, target_percent) == 0)
12264 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
12265 len = build_int_cstu (size_type_node, strlen (fmt_str));
12267 /* If the format is "%s" and first ... argument is a string literal,
12268 we know the size too. */
12269 else if (fcode == BUILT_IN_SPRINTF_CHK
12270 && strcmp (fmt_str, target_percent_s) == 0)
12272 tree arg;
12274 if (nargs == 5)
12276 arg = CALL_EXPR_ARG (exp, 4);
12277 if (validate_arg (arg, POINTER_TYPE))
12279 len = c_strlen (arg, 1);
12280 if (! len || ! host_integerp (len, 1))
12281 len = NULL_TREE;
12287 if (! integer_all_onesp (size))
12289 if (! len || ! tree_int_cst_lt (len, size))
12290 return NULL_TREE;
12293 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12294 or if format doesn't contain % chars or is "%s". */
12295 if (! integer_zerop (flag))
12297 if (fmt_str == NULL)
12298 return NULL_TREE;
12299 if (strchr (fmt_str, target_percent) != NULL
12300 && strcmp (fmt_str, target_percent_s))
12301 return NULL_TREE;
12304 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12305 fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
12306 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
12307 if (!fn)
12308 return NULL_TREE;
12310 return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
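/* A minimal illustrative sketch of the __sprintf_chk folding above,
   assuming D is a hypothetical char * and NAME a string with unknown
   contents:

     __sprintf_chk (D, 0, 32, "hello")           =>  sprintf (D, "hello")
     __sprintf_chk (D, 0, (size_t) -1, "%d", i)  =>  sprintf (D, "%d", i)
     __sprintf_chk (D, 0, 32, "hi %s", NAME)     =>  NULL_TREE; the
                                                     checked call is kept
                                                     because the output
                                                     length is unknown.  */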
12313 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
12314 a normal call should be emitted rather than expanding the function
12315 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12316 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12317 passed as second argument. */
12319 tree
12320 fold_builtin_snprintf_chk (tree exp, tree maxlen,
12321 enum built_in_function fcode)
12323 tree dest, size, len, fn, fmt, flag;
12324 const char *fmt_str;
12326 /* Verify the required arguments in the original call. */
12327 if (call_expr_nargs (exp) < 5)
12328 return NULL_TREE;
12329 dest = CALL_EXPR_ARG (exp, 0);
12330 if (!validate_arg (dest, POINTER_TYPE))
12331 return NULL_TREE;
12332 len = CALL_EXPR_ARG (exp, 1);
12333 if (!validate_arg (len, INTEGER_TYPE))
12334 return NULL_TREE;
12335 flag = CALL_EXPR_ARG (exp, 2);
12336 if (!validate_arg (flag, INTEGER_TYPE))
12337 return NULL_TREE;
12338 size = CALL_EXPR_ARG (exp, 3);
12339 if (!validate_arg (size, INTEGER_TYPE))
12340 return NULL_TREE;
12341 fmt = CALL_EXPR_ARG (exp, 4);
12342 if (!validate_arg (fmt, POINTER_TYPE))
12343 return NULL_TREE;
12345 if (! host_integerp (size, 1))
12346 return NULL_TREE;
12348 if (! integer_all_onesp (size))
12350 if (! host_integerp (len, 1))
12352 /* If LEN is not constant, try MAXLEN too.
12353 For MAXLEN only allow optimizing into non-_ocs function
12354 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12355 if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
12356 return NULL_TREE;
12358 else
12359 maxlen = len;
12361 if (tree_int_cst_lt (size, maxlen))
12362 return NULL_TREE;
12365 if (!init_target_chars ())
12366 return NULL_TREE;
12368 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12369 or if format doesn't contain % chars or is "%s". */
12370 if (! integer_zerop (flag))
12372 fmt_str = c_getstr (fmt);
12373 if (fmt_str == NULL)
12374 return NULL_TREE;
12375 if (strchr (fmt_str, target_percent) != NULL
12376 && strcmp (fmt_str, target_percent_s))
12377 return NULL_TREE;
12380 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12381 available. */
12382 fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
12383 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
12384 if (!fn)
12385 return NULL_TREE;
12387 return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
12390 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12391 FMT and ARG are the arguments to the call; we don't fold cases with
12392 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12394 Return NULL_TREE if no simplification was possible, otherwise return the
12395 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12396 code of the function to be simplified. */
12398 static tree
12399 fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
12400 enum built_in_function fcode)
12402 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
12403 const char *fmt_str = NULL;
12405 /* If the return value is used, don't do the transformation. */
12406 if (! ignore)
12407 return NULL_TREE;
12409 /* Verify the required arguments in the original call. */
12410 if (!validate_arg (fmt, POINTER_TYPE))
12411 return NULL_TREE;
12413 /* Check whether the format is a literal string constant. */
12414 fmt_str = c_getstr (fmt);
12415 if (fmt_str == NULL)
12416 return NULL_TREE;
12418 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
12420 /* If we're using an unlocked function, assume the other
12421 unlocked functions exist explicitly. */
12422 fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
12423 fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];
12425 else
12427 fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
12428 fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];
12431 if (!init_target_chars ())
12432 return NULL_TREE;
12434 if (strcmp (fmt_str, target_percent_s) == 0
12435 || strchr (fmt_str, target_percent) == NULL)
12437 const char *str;
12439 if (strcmp (fmt_str, target_percent_s) == 0)
12441 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12442 return NULL_TREE;
12444 if (!arg || !validate_arg (arg, POINTER_TYPE))
12445 return NULL_TREE;
12447 str = c_getstr (arg);
12448 if (str == NULL)
12449 return NULL_TREE;
12451 else
12453 /* The format specifier doesn't contain any '%' characters. */
12454 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
12455 && arg)
12456 return NULL_TREE;
12457 str = fmt_str;
12460 /* If the string was "", printf does nothing. */
12461 if (str[0] == '\0')
12462 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12464 /* If the string has length of 1, call putchar. */
12465 if (str[1] == '\0')
12467 /* Given printf ("c"), where c is any one character,
12468 convert "c"[0] to an int and pass that to the replacement
12469 function. */
12470 newarg = build_int_cst (NULL_TREE, str[0]);
12471 if (fn_putchar)
12472 call = build_call_expr (fn_putchar, 1, newarg);
12474 else
12476 /* If the string was "string\n", call puts("string"). */
12477 size_t len = strlen (str);
12478 if ((unsigned char)str[len - 1] == target_newline)
12480 /* Create a NUL-terminated string that's one char shorter
12481 than the original, stripping off the trailing '\n'. */
12482 char *newstr = XALLOCAVEC (char, len);
12483 memcpy (newstr, str, len - 1);
12484 newstr[len - 1] = 0;
12486 newarg = build_string_literal (len, newstr);
12487 if (fn_puts)
12488 call = build_call_expr (fn_puts, 1, newarg);
12490 else
12491 /* We'd like to arrange to call fputs(string,stdout) here,
12492 but we need stdout and don't have a way to get it yet. */
12493 return NULL_TREE;
12497 /* The other optimizations can be done only on the non-va_list variants. */
12498 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
12499 return NULL_TREE;
12501 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12502 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
12504 if (!arg || !validate_arg (arg, POINTER_TYPE))
12505 return NULL_TREE;
12506 if (fn_puts)
12507 call = build_call_expr (fn_puts, 1, arg);
12510 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12511 else if (strcmp (fmt_str, target_percent_c) == 0)
12513 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12514 return NULL_TREE;
12515 if (fn_putchar)
12516 call = build_call_expr (fn_putchar, 1, arg);
12519 if (!call)
12520 return NULL_TREE;
12522 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
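/* A minimal illustrative sketch of the printf folding above; it only
   applies when the call's value is unused, and S stands for a
   hypothetical char * with unknown contents:

     printf ("")         =>  folded to 0; the call disappears
     printf ("x")        =>  putchar ('x')
     printf ("hello\n")  =>  puts ("hello")
     printf ("hello")    =>  NULL_TREE; fputs (..., stdout) would be
                             needed and stdout is not available here
     printf ("%s\n", S)  =>  puts (S)
     printf ("%c", c)    =>  putchar (c).  */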
12525 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
12526 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12527 more than 3 arguments, and ARG may be null in the 2-argument case.
12529 Return NULL_TREE if no simplification was possible, otherwise return the
12530 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12531 code of the function to be simplified. */
12533 static tree
12534 fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
12535 enum built_in_function fcode)
12537 tree fn_fputc, fn_fputs, call = NULL_TREE;
12538 const char *fmt_str = NULL;
12540 /* If the return value is used, don't do the transformation. */
12541 if (! ignore)
12542 return NULL_TREE;
12544 /* Verify the required arguments in the original call. */
12545 if (!validate_arg (fp, POINTER_TYPE))
12546 return NULL_TREE;
12547 if (!validate_arg (fmt, POINTER_TYPE))
12548 return NULL_TREE;
12550 /* Check whether the format is a literal string constant. */
12551 fmt_str = c_getstr (fmt);
12552 if (fmt_str == NULL)
12553 return NULL_TREE;
12555 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
12557 /* If we're using an unlocked function, assume the other
12558 unlocked functions exist explicitly. */
12559 fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
12560 fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];
12562 else
12564 fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
12565 fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];
12568 if (!init_target_chars ())
12569 return NULL_TREE;
12571 /* If the format doesn't contain % args or %%, use strcpy. */
12572 if (strchr (fmt_str, target_percent) == NULL)
12574 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
12575 && arg)
12576 return NULL_TREE;
12578 /* If the format specifier was "", fprintf does nothing. */
12579 if (fmt_str[0] == '\0')
12581 /* If FP has side-effects, just wait until gimplification is
12582 done. */
12583 if (TREE_SIDE_EFFECTS (fp))
12584 return NULL_TREE;
12586 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
12589 /* When "string" doesn't contain %, replace all cases of
12590 fprintf (fp, string) with fputs (string, fp). The fputs
12591 builtin will take care of special cases like length == 1. */
12592 if (fn_fputs)
12593 call = build_call_expr (fn_fputs, 2, fmt, fp);
12596 /* The other optimizations can be done only on the non-va_list variants. */
12597 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
12598 return NULL_TREE;
12600 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12601 else if (strcmp (fmt_str, target_percent_s) == 0)
12603 if (!arg || !validate_arg (arg, POINTER_TYPE))
12604 return NULL_TREE;
12605 if (fn_fputs)
12606 call = build_call_expr (fn_fputs, 2, arg, fp);
12609 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12610 else if (strcmp (fmt_str, target_percent_c) == 0)
12612 if (!arg || !validate_arg (arg, INTEGER_TYPE))
12613 return NULL_TREE;
12614 if (fn_fputc)
12615 call = build_call_expr (fn_fputc, 2, arg, fp);
12618 if (!call)
12619 return NULL_TREE;
12620 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
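/* A minimal illustrative sketch of the fprintf folding above; it only
   applies when the call's value is unused, F and S being hypothetical
   FILE * and char * values with unknown contents:

     (void) fprintf (F, "hello")  =>  fputs ("hello", F)
     (void) fprintf (F, "%s", S)  =>  fputs (S, F)
     (void) fprintf (F, "%c", c)  =>  fputc (c, F)
     (void) fprintf (F, "%d", i)  =>  NULL_TREE; the library call is
                                      kept.  */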
12623 /* Initialize format string characters in the target charset. */
12625 static bool
12626 init_target_chars (void)
12628 static bool init;
12629 if (!init)
12631 target_newline = lang_hooks.to_target_charset ('\n');
12632 target_percent = lang_hooks.to_target_charset ('%');
12633 target_c = lang_hooks.to_target_charset ('c');
12634 target_s = lang_hooks.to_target_charset ('s');
12635 if (target_newline == 0 || target_percent == 0 || target_c == 0
12636 || target_s == 0)
12637 return false;
12639 target_percent_c[0] = target_percent;
12640 target_percent_c[1] = target_c;
12641 target_percent_c[2] = '\0';
12643 target_percent_s[0] = target_percent;
12644 target_percent_s[1] = target_s;
12645 target_percent_s[2] = '\0';
12647 target_percent_s_newline[0] = target_percent;
12648 target_percent_s_newline[1] = target_s;
12649 target_percent_s_newline[2] = target_newline;
12650 target_percent_s_newline[3] = '\0';
12652 init = true;
12654 return true;
12657 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12658 and no overflow/underflow occurred. INEXACT is true if M was not
12659 exactly calculated. TYPE is the tree type for the result. This
12660 function assumes that you cleared the MPFR flags and then
12661 calculated M to see if anything subsequently set a flag prior to
12662 entering this function. Return NULL_TREE if any checks fail. */
12664 static tree
12665 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
12667 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12668 overflow/underflow occurred. If -frounding-math, proceed iff the
12669 result of calling FUNC was exact. */
12670 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12671 && (!flag_rounding_math || !inexact))
12673 REAL_VALUE_TYPE rr;
12675 real_from_mpfr (&rr, m, type, GMP_RNDN);
12676 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12677 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12678 but the mpfr_t is not, then we underflowed in the
12679 conversion. */
12680 if (real_isfinite (&rr)
12681 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
12683 REAL_VALUE_TYPE rmode;
12685 real_convert (&rmode, TYPE_MODE (type), &rr);
12686 /* Proceed iff the specified mode can hold the value. */
12687 if (real_identical (&rmode, &rr))
12688 return build_real (type, rmode);
12691 return NULL_TREE;
12694 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12695 FUNC on it and return the resulting value as a tree with type TYPE.
12696 If MIN and/or MAX are not NULL, then the supplied ARG must be
12697 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12698 acceptable values, otherwise they are not. The mpfr precision is
12699 set to the precision of TYPE. We assume that function FUNC returns
12700 zero if the result could be calculated exactly within the requested
12701 precision. */
12703 static tree
12704 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
12705 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
12706 bool inclusive)
12708 tree result = NULL_TREE;
12710 STRIP_NOPS (arg);
12712 /* To proceed, MPFR must exactly represent the target floating point
12713 format, which only happens when the target base equals two. */
12714 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12715 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
12717 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12719 if (real_isfinite (ra)
12720 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
12721 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
12723 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12724 int inexact;
12725 mpfr_t m;
12727 mpfr_init2 (m, prec);
12728 mpfr_from_real (m, ra, GMP_RNDN);
12729 mpfr_clear_flags ();
12730 inexact = func (m, m, GMP_RNDN);
12731 result = do_mpfr_ckconv (m, type, inexact);
12732 mpfr_clear (m);
12736 return result;
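/* A minimal illustrative sketch of the MPFR-based constant folding
   driven by the helper above: with a binary target format and a finite
   constant argument, a call such as __builtin_sin (1.0) is evaluated
   in MPFR at the precision of the result type and replaced by the
   correctly rounded REAL_CST (approximately 0.8414709848078965).  A
   non-constant argument, an argument outside the MIN/MAX bounds, or an
   inexact result under -frounding-math leaves the call untouched.  */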
12739 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12740 FUNC on it and return the resulting value as a tree with type TYPE.
12741 The mpfr precision is set to the precision of TYPE. We assume that
12742 function FUNC returns zero if the result could be calculated
12743 exactly within the requested precision. */
12745 static tree
12746 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
12747 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12749 tree result = NULL_TREE;
12751 STRIP_NOPS (arg1);
12752 STRIP_NOPS (arg2);
12754 /* To proceed, MPFR must exactly represent the target floating point
12755 format, which only happens when the target base equals two. */
12756 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12757 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12758 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12760 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12761 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12763 if (real_isfinite (ra1) && real_isfinite (ra2))
12765 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12766 int inexact;
12767 mpfr_t m1, m2;
12769 mpfr_inits2 (prec, m1, m2, NULL);
12770 mpfr_from_real (m1, ra1, GMP_RNDN);
12771 mpfr_from_real (m2, ra2, GMP_RNDN);
12772 mpfr_clear_flags ();
12773 inexact = func (m1, m1, m2, GMP_RNDN);
12774 result = do_mpfr_ckconv (m1, type, inexact);
12775 mpfr_clears (m1, m2, NULL);
12779 return result;
12782 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12783 FUNC on it and return the resulting value as a tree with type TYPE.
12784 The mpfr precision is set to the precision of TYPE. We assume that
12785 function FUNC returns zero if the result could be calculated
12786 exactly within the requested precision. */
12788 static tree
12789 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
12790 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
12792 tree result = NULL_TREE;
12794 STRIP_NOPS (arg1);
12795 STRIP_NOPS (arg2);
12796 STRIP_NOPS (arg3);
12798 /* To proceed, MPFR must exactly represent the target floating point
12799 format, which only happens when the target base equals two. */
12800 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12801 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
12802 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
12803 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
12805 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
12806 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
12807 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
12809 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
12811 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12812 int inexact;
12813 mpfr_t m1, m2, m3;
12815 mpfr_inits2 (prec, m1, m2, m3, NULL);
12816 mpfr_from_real (m1, ra1, GMP_RNDN);
12817 mpfr_from_real (m2, ra2, GMP_RNDN);
12818 mpfr_from_real (m3, ra3, GMP_RNDN);
12819 mpfr_clear_flags ();
12820 inexact = func (m1, m1, m2, m3, GMP_RNDN);
12821 result = do_mpfr_ckconv (m1, type, inexact);
12822 mpfr_clears (m1, m2, m3, NULL);
12826 return result;
12829 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12830 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12831 If ARG_SINP and ARG_COSP are NULL then the result is returned
12832 as a complex value.
12833 The type is taken from the type of ARG and is used for setting the
12834 precision of the calculation and results. */
12836 static tree
12837 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
12839 tree const type = TREE_TYPE (arg);
12840 tree result = NULL_TREE;
12842 STRIP_NOPS (arg);
12844 /* To proceed, MPFR must exactly represent the target floating point
12845 format, which only happens when the target base equals two. */
12846 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12847 && TREE_CODE (arg) == REAL_CST
12848 && !TREE_OVERFLOW (arg))
12850 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
12852 if (real_isfinite (ra))
12854 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12855 tree result_s, result_c;
12856 int inexact;
12857 mpfr_t m, ms, mc;
12859 mpfr_inits2 (prec, m, ms, mc, NULL);
12860 mpfr_from_real (m, ra, GMP_RNDN);
12861 mpfr_clear_flags ();
12862 inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
12863 result_s = do_mpfr_ckconv (ms, type, inexact);
12864 result_c = do_mpfr_ckconv (mc, type, inexact);
12865 mpfr_clears (m, ms, mc, NULL);
12866 if (result_s && result_c)
12868 /* If we are to return in a complex value do so. */
12869 if (!arg_sinp && !arg_cosp)
12870 return build_complex (build_complex_type (type),
12871 result_c, result_s);
12873 /* Dereference the sin/cos pointer arguments. */
12874 arg_sinp = build_fold_indirect_ref (arg_sinp);
12875 arg_cosp = build_fold_indirect_ref (arg_cosp);
12876 /* Proceed if valid pointer types were passed in. */
12877 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12878 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12880 /* Set the values. */
12881 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
12882 result_s);
12883 TREE_SIDE_EFFECTS (result_s) = 1;
12884 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
12885 result_c);
12886 TREE_SIDE_EFFECTS (result_c) = 1;
12887 /* Combine the assignments into a compound expr. */
12888 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12889 result_s, result_c));
12894 return result;
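
/* Illustrative sketch, added by the editor and not part of the original
   builtins.c: the two intended uses of do_mpfr_sincos.  The wrapper
   names are hypothetical; ARG is the angle and SINP/COSP are the
   pointer operands of a sincos() call.  */

static tree
example_fold_sincos (tree arg, tree sinp, tree cosp)
{
  /* With both pointers supplied, a successful fold is a COMPOUND_EXPR
     performing the two stores.  */
  return do_mpfr_sincos (arg, sinp, cosp);
}

static tree
example_fold_cexpi (tree arg)
{
  /* With the pointers omitted, a successful fold is a COMPLEX_CST
     holding cos (ARG) + i*sin (ARG).  */
  return do_mpfr_sincos (arg, NULL_TREE, NULL_TREE);
}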
12897 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12898 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12899 two-argument mpfr order-N Bessel function FUNC on them and return the
12900 resulting value as a tree with type TYPE. The mpfr precision is set to
12901 the precision of TYPE. We assume that function FUNC returns zero if the
12902 result could be calculated exactly within the requested precision. If MIN
12903 is non-NULL, ARG2 must compare greater than *MIN (or >= when INCLUSIVE). */
12904 static tree
12905 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12906 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12907 const REAL_VALUE_TYPE *min, bool inclusive)
12909 tree result = NULL_TREE;
12911 STRIP_NOPS (arg1);
12912 STRIP_NOPS (arg2);
12914 /* To proceed, MPFR must exactly represent the target floating point
12915 format, which only happens when the target base equals two. */
12916 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12917 && host_integerp (arg1, 0)
12918 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12920 const HOST_WIDE_INT n = tree_low_cst (arg1, 0);
12921 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12923 if (n == (long)n
12924 && real_isfinite (ra)
12925 && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min)))
12927 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12928 int inexact;
12929 mpfr_t m;
12931 mpfr_init2 (m, prec);
12932 mpfr_from_real (m, ra, GMP_RNDN);
12933 mpfr_clear_flags ();
12934 inexact = func (m, n, m, GMP_RNDN);
12935 result = do_mpfr_ckconv (m, type, inexact);
12936 mpfr_clear (m);
12940 return result;
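
/* Illustrative sketch, added by the editor and not part of the original
   builtins.c: how jn/yn folders can be phrased in terms of
   do_mpfr_bessel_n.  The wrapper names are hypothetical; mpfr_jn and
   mpfr_yn are the real MPFR Bessel routines and dconst0 is GCC's
   REAL_VALUE_TYPE constant zero.  */

static tree
example_fold_jn (tree arg_n, tree arg_x, tree type)
{
  /* jn is folded for any finite X, so no lower bound is passed.  */
  return do_mpfr_bessel_n (arg_n, arg_x, type, mpfr_jn, NULL, false);
}

static tree
example_fold_yn (tree arg_n, tree arg_x, tree type)
{
  /* yn is only folded for X strictly greater than zero.  */
  return do_mpfr_bessel_n (arg_n, arg_x, type, mpfr_yn, &dconst0, false);
}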
12943 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12944 *(ARG_QUO) to the low quotient bits and return the remainder as the
12945 result. The type is taken from the type of ARG0 and is used for
12946 setting the precision of the calculation and results. */
12948 static tree
12949 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12951 tree const type = TREE_TYPE (arg0);
12952 tree result = NULL_TREE;
12954 STRIP_NOPS (arg0);
12955 STRIP_NOPS (arg1);
12957 /* To proceed, MPFR must exactly represent the target floating point
12958 format, which only happens when the target base equals two. */
12959 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12960 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12961 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12963 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12964 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12966 if (real_isfinite (ra0) && real_isfinite (ra1))
12968 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
12969 tree result_rem;
12970 long integer_quo;
12971 mpfr_t m0, m1;
12973 mpfr_inits2 (prec, m0, m1, NULL);
12974 mpfr_from_real (m0, ra0, GMP_RNDN);
12975 mpfr_from_real (m1, ra1, GMP_RNDN);
12976 mpfr_clear_flags ();
12977 mpfr_remquo (m0, &integer_quo, m0, m1, GMP_RNDN);
12978 /* Remquo is independent of the rounding mode, so pass
12979 inexact=0 to do_mpfr_ckconv(). */
12980 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12981 mpfr_clears (m0, m1, NULL);
12982 if (result_rem)
12984 /* MPFR calculates quo in the host's long, so it may
12985 return more bits in quo than the target int can hold
12986 if sizeof(host long) > sizeof(target int). This can
12987 happen even for native compilers in LP64 mode. In
12988 these cases, reduce the quo value modulo 2^(INT_TYPE_SIZE - 1)
12989 so that its magnitude fits in the target int while
12990 one bit is left for the sign. */
12991 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12992 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
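/* Worked example added by the editor: with a 32-bit target int and a
   64-bit host long, INT_TYPE_SIZE is 32 and the reduction above is
   integer_quo %= (long) (1UL << 31), i.e. modulo 2147483648, which
   keeps the stored quotient bits representable in the target int.  */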
12994 /* Dereference the quo pointer argument. */
12995 arg_quo = build_fold_indirect_ref (arg_quo);
12996 /* Proceed iff a valid pointer type was passed in. */
12997 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12999 /* Set the value. */
13000 tree result_quo = fold_build2 (MODIFY_EXPR,
13001 TREE_TYPE (arg_quo), arg_quo,
13002 build_int_cst (NULL, integer_quo));
13003 TREE_SIDE_EFFECTS (result_quo) = 1;
13004 /* Combine the quo assignment with the rem. */
13005 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13006 result_quo, result_rem));
13011 return result;
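
/* Illustrative sketch, added by the editor and not part of the original
   builtins.c: a minimal caller of do_mpfr_remquo for remquo (X, Y, &Q).
   The wrapper name is hypothetical; ARG_QUO is the int* operand.  */

static tree
example_fold_remquo (tree arg0, tree arg1, tree arg_quo)
{
  /* Only attempt the fold when the third operand really is a pointer;
     do_mpfr_remquo then checks that it points to the target int type.  */
  if (TREE_CODE (TREE_TYPE (arg_quo)) == POINTER_TYPE)
    return do_mpfr_remquo (arg0, arg1, arg_quo);
  return NULL_TREE;
}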
13014 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13015 resulting value as a tree with type TYPE. The mpfr precision is
13016 set to the precision of TYPE. We assume that this mpfr function
13017 returns zero if the result could be calculated exactly within the
13018 requested precision. In addition, the integer pointer represented
13019 by ARG_SG will be dereferenced and set to the appropriate signgam
13020 (-1,1) value. */
13022 static tree
13023 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13025 tree result = NULL_TREE;
13027 STRIP_NOPS (arg);
13029 /* To proceed, MPFR must exactly represent the target floating point
13030 format, which only happens when the target base equals two. Also
13031 verify ARG is a constant and that ARG_SG is an int pointer. */
13032 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13033 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
13034 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
13035 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
13037 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
13039 /* In addition to NaN and Inf, the argument cannot be zero or a
13040 negative integer, where the gamma function has poles. */
13041 if (real_isfinite (ra)
13042 && ra->cl != rvc_zero
13043 && !(real_isneg(ra) && real_isinteger(ra, TYPE_MODE (type))))
13045 const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
13046 int inexact, sg;
13047 mpfr_t m;
13048 tree result_lg;
13050 mpfr_init2 (m, prec);
13051 mpfr_from_real (m, ra, GMP_RNDN);
13052 mpfr_clear_flags ();
13053 inexact = mpfr_lgamma (m, &sg, m, GMP_RNDN);
13054 result_lg = do_mpfr_ckconv (m, type, inexact);
13055 mpfr_clear (m);
13056 if (result_lg)
13058 tree result_sg;
13060 /* Dereference the arg_sg pointer argument. */
13061 arg_sg = build_fold_indirect_ref (arg_sg);
13062 /* Assign the signgam value into *arg_sg. */
13063 result_sg = fold_build2 (MODIFY_EXPR,
13064 TREE_TYPE (arg_sg), arg_sg,
13065 build_int_cst (NULL, sg));
13066 TREE_SIDE_EFFECTS (result_sg) = 1;
13067 /* Combine the signgam assignment with the lgamma result. */
13068 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13069 result_sg, result_lg));
13074 return result;
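
/* Illustrative sketch, added by the editor and not part of the original
   builtins.c: a minimal caller of do_mpfr_lgamma_r for
   lgamma_r (X, &signgam).  The wrapper name is hypothetical; TYPE is
   the floating point type of the call.  It is kept inside the
   MPFR >= 2.3.0 guard because do_mpfr_lgamma_r is only compiled there.  */

static tree
example_fold_lgamma_r (tree arg, tree arg_sg, tree type)
{
  /* do_mpfr_lgamma_r itself verifies that ARG_SG is an int pointer and
     that ARG avoids the poles of the gamma function.  */
  return do_mpfr_lgamma_r (arg, arg_sg, type);
}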
13076 #endif